1. Packages
  2. Google Cloud (GCP) Classic
  3. API Docs
  4. storage
  5. TransferJob
Google Cloud Classic v7.19.0 published on Thursday, Apr 18, 2024 by Pulumi

gcp.storage.TransferJob

Explore with Pulumi AI

gcp logo
Google Cloud Classic v7.19.0 published on Thursday, Apr 18, 2024 by Pulumi

    Creates a new Transfer Job in Google Cloud Storage Transfer.

    To get more information about Google Cloud Storage Transfer, see the Storage Transfer Service overview and the `storagetransfer` REST API documentation (https://cloud.google.com/storage-transfer/docs).

    Example Usage

    Example creating a nightly Transfer Job from an AWS S3 Bucket to a GCS bucket.

    import * as pulumi from "@pulumi/pulumi";
    import * as gcp from "@pulumi/gcp";
    
    // `default` is a reserved word in JavaScript, so the lookup result is bound to
    // `_default` — the name every later reference in this example already uses.
    // (The original example declared `const default`, which is a syntax error.)
    const _default = gcp.storage.getTransferProjectServiceAccount({
        project: project,
    });
    // GCS bucket that receives the nightly backup of the S3 bucket.
    const s3_backup_bucket = new gcp.storage.Bucket("s3-backup-bucket", {
        name: `${awsS3Bucket}-backup`,
        storageClass: "NEARLINE",
        project: project,
        location: "US",
    });
    // Grant the Storage Transfer service account admin access to the sink bucket.
    const s3_backup_bucketBucketIAMMember = new gcp.storage.BucketIAMMember("s3-backup-bucket", {
        bucket: s3_backup_bucket.name,
        role: "roles/storage.admin",
        member: _default.then(_default => `serviceAccount:${_default.email}`),
    });
    const topic = new gcp.pubsub.Topic("topic", {name: pubsubTopicName});
    // Allow the Storage Transfer service account to publish job notifications.
    const notificationConfig = new gcp.pubsub.TopicIAMMember("notification_config", {
        topic: topic.id,
        role: "roles/pubsub.publisher",
        member: _default.then(_default => `serviceAccount:${_default.email}`),
    });
    // Weekly transfer job (repeatInterval: 604800s) copying the S3 bucket into the
    // GCS sink, starting 2018-10-01 at 23:30 and ending 2019-01-15.
    const s3_bucket_nightly_backup = new gcp.storage.TransferJob("s3-bucket-nightly-backup", {
        description: "Nightly backup of S3 bucket",
        project: project,
        transferSpec: {
            objectConditions: {
                maxTimeElapsedSinceLastModification: "600s",
                excludePrefixes: ["requests.gz"],
            },
            transferOptions: {
                deleteObjectsUniqueInSink: false,
            },
            awsS3DataSource: {
                bucketName: awsS3Bucket,
                awsAccessKey: {
                    accessKeyId: awsAccessKey,
                    secretAccessKey: awsSecretKey,
                },
            },
            gcsDataSink: {
                bucketName: s3_backup_bucket.name,
                path: "foo/bar/",
            },
        },
        schedule: {
            scheduleStartDate: {
                year: 2018,
                month: 10,
                day: 1,
            },
            scheduleEndDate: {
                year: 2019,
                month: 1,
                day: 15,
            },
            startTimeOfDay: {
                hours: 23,
                minutes: 30,
                seconds: 0,
                nanos: 0,
            },
            repeatInterval: "604800s",
        },
        notificationConfig: {
            pubsubTopic: topic.id,
            eventTypes: [
                "TRANSFER_OPERATION_SUCCESS",
                "TRANSFER_OPERATION_FAILED",
            ],
            payloadFormat: "JSON",
        },
    });
    
    import pulumi
    import pulumi_gcp as gcp
    
    # Look up the project's Storage Transfer service account; its email is granted
    # the IAM roles the transfer job needs below.
    default = gcp.storage.get_transfer_project_service_account(project=project)
    # GCS bucket that receives the nightly backup of the S3 bucket.
    s3_backup_bucket = gcp.storage.Bucket("s3-backup-bucket",
        name=f"{aws_s3_bucket}-backup",
        storage_class="NEARLINE",
        project=project,
        location="US")
    # Grant the Storage Transfer service account admin access to the sink bucket.
    s3_backup_bucket_bucket_iam_member = gcp.storage.BucketIAMMember("s3-backup-bucket",
        bucket=s3_backup_bucket.name,
        role="roles/storage.admin",
        member=f"serviceAccount:{default.email}")
    topic = gcp.pubsub.Topic("topic", name=pubsub_topic_name)
    # Allow the Storage Transfer service account to publish job notifications.
    notification_config = gcp.pubsub.TopicIAMMember("notification_config",
        topic=topic.id,
        role="roles/pubsub.publisher",
        member=f"serviceAccount:{default.email}")
    # Weekly transfer job (repeat_interval 604800s) copying the S3 bucket into the
    # GCS sink, starting 2018-10-01 at 23:30 and ending 2019-01-15.
    s3_bucket_nightly_backup = gcp.storage.TransferJob("s3-bucket-nightly-backup",
        description="Nightly backup of S3 bucket",
        project=project,
        transfer_spec=gcp.storage.TransferJobTransferSpecArgs(
            object_conditions=gcp.storage.TransferJobTransferSpecObjectConditionsArgs(
                max_time_elapsed_since_last_modification="600s",
                exclude_prefixes=["requests.gz"],
            ),
            transfer_options=gcp.storage.TransferJobTransferSpecTransferOptionsArgs(
                delete_objects_unique_in_sink=False,
            ),
            aws_s3_data_source=gcp.storage.TransferJobTransferSpecAwsS3DataSourceArgs(
                bucket_name=aws_s3_bucket,
                aws_access_key=gcp.storage.TransferJobTransferSpecAwsS3DataSourceAwsAccessKeyArgs(
                    access_key_id=aws_access_key,
                    secret_access_key=aws_secret_key,
                ),
            ),
            gcs_data_sink=gcp.storage.TransferJobTransferSpecGcsDataSinkArgs(
                bucket_name=s3_backup_bucket.name,
                path="foo/bar/",
            ),
        ),
        schedule=gcp.storage.TransferJobScheduleArgs(
            schedule_start_date=gcp.storage.TransferJobScheduleScheduleStartDateArgs(
                year=2018,
                month=10,
                day=1,
            ),
            schedule_end_date=gcp.storage.TransferJobScheduleScheduleEndDateArgs(
                year=2019,
                month=1,
                day=15,
            ),
            start_time_of_day=gcp.storage.TransferJobScheduleStartTimeOfDayArgs(
                hours=23,
                minutes=30,
                seconds=0,
                nanos=0,
            ),
            repeat_interval="604800s",
        ),
        notification_config=gcp.storage.TransferJobNotificationConfigArgs(
            pubsub_topic=topic.id,
            event_types=[
                "TRANSFER_OPERATION_SUCCESS",
                "TRANSFER_OPERATION_FAILED",
            ],
            payload_format="JSON",
        ))
    
    package main
    
    import (
    	"fmt"
    
    	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/pubsub"
    	"github.com/pulumi/pulumi-gcp/sdk/v7/go/gcp/storage"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_default, err := storage.GetTransferProjectServiceAccount(ctx, &storage.GetTransferProjectServiceAccountArgs{
    			Project: pulumi.StringRef(project),
    		}, nil)
    		if err != nil {
    			return err
    		}
    		_, err = storage.NewBucket(ctx, "s3-backup-bucket", &storage.BucketArgs{
    			Name:         pulumi.String(fmt.Sprintf("%v-backup", awsS3Bucket)),
    			StorageClass: pulumi.String("NEARLINE"),
    			Project:      pulumi.Any(project),
    			Location:     pulumi.String("US"),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = storage.NewBucketIAMMember(ctx, "s3-backup-bucket", &storage.BucketIAMMemberArgs{
    			Bucket: s3_backup_bucket.Name,
    			Role:   pulumi.String("roles/storage.admin"),
    			Member: pulumi.String(fmt.Sprintf("serviceAccount:%v", _default.Email)),
    		})
    		if err != nil {
    			return err
    		}
    		topic, err := pubsub.NewTopic(ctx, "topic", &pubsub.TopicArgs{
    			Name: pulumi.Any(pubsubTopicName),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = pubsub.NewTopicIAMMember(ctx, "notification_config", &pubsub.TopicIAMMemberArgs{
    			Topic:  topic.ID(),
    			Role:   pulumi.String("roles/pubsub.publisher"),
    			Member: pulumi.String(fmt.Sprintf("serviceAccount:%v", _default.Email)),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = storage.NewTransferJob(ctx, "s3-bucket-nightly-backup", &storage.TransferJobArgs{
    			Description: pulumi.String("Nightly backup of S3 bucket"),
    			Project:     pulumi.Any(project),
    			TransferSpec: &storage.TransferJobTransferSpecArgs{
    				ObjectConditions: &storage.TransferJobTransferSpecObjectConditionsArgs{
    					MaxTimeElapsedSinceLastModification: pulumi.String("600s"),
    					ExcludePrefixes: pulumi.StringArray{
    						pulumi.String("requests.gz"),
    					},
    				},
    				TransferOptions: &storage.TransferJobTransferSpecTransferOptionsArgs{
    					DeleteObjectsUniqueInSink: pulumi.Bool(false),
    				},
    				AwsS3DataSource: &storage.TransferJobTransferSpecAwsS3DataSourceArgs{
    					BucketName: pulumi.Any(awsS3Bucket),
    					AwsAccessKey: &storage.TransferJobTransferSpecAwsS3DataSourceAwsAccessKeyArgs{
    						AccessKeyId:     pulumi.Any(awsAccessKey),
    						SecretAccessKey: pulumi.Any(awsSecretKey),
    					},
    				},
    				GcsDataSink: &storage.TransferJobTransferSpecGcsDataSinkArgs{
    					BucketName: s3_backup_bucket.Name,
    					Path:       pulumi.String("foo/bar/"),
    				},
    			},
    			Schedule: &storage.TransferJobScheduleArgs{
    				ScheduleStartDate: &storage.TransferJobScheduleScheduleStartDateArgs{
    					Year:  pulumi.Int(2018),
    					Month: pulumi.Int(10),
    					Day:   pulumi.Int(1),
    				},
    				ScheduleEndDate: &storage.TransferJobScheduleScheduleEndDateArgs{
    					Year:  pulumi.Int(2019),
    					Month: pulumi.Int(1),
    					Day:   pulumi.Int(15),
    				},
    				StartTimeOfDay: &storage.TransferJobScheduleStartTimeOfDayArgs{
    					Hours:   pulumi.Int(23),
    					Minutes: pulumi.Int(30),
    					Seconds: pulumi.Int(0),
    					Nanos:   pulumi.Int(0),
    				},
    				RepeatInterval: pulumi.String("604800s"),
    			},
    			NotificationConfig: &storage.TransferJobNotificationConfigArgs{
    				PubsubTopic: topic.ID(),
    				EventTypes: pulumi.StringArray{
    					pulumi.String("TRANSFER_OPERATION_SUCCESS"),
    					pulumi.String("TRANSFER_OPERATION_FAILED"),
    				},
    				PayloadFormat: pulumi.String("JSON"),
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Gcp = Pulumi.Gcp;
    
    return await Deployment.RunAsync(() => 
    {
        // Look up the project's Storage Transfer service account; its email is
        // granted the IAM roles the transfer job needs below.
        var @default = Gcp.Storage.GetTransferProjectServiceAccount.Invoke(new()
        {
            Project = project,
        });
    
        // GCS bucket that receives the nightly backup of the S3 bucket.
        var s3_backup_bucket = new Gcp.Storage.Bucket("s3-backup-bucket", new()
        {
            Name = $"{awsS3Bucket}-backup",
            StorageClass = "NEARLINE",
            Project = project,
            Location = "US",
        });
    
        // Grant the Storage Transfer service account admin access to the sink bucket.
        // A single Apply suffices: the lambda receives the invoke *result*, whose
        // Email is a plain string (the original example nested a second Apply on the
        // result, which does not compile).
        var s3_backup_bucketBucketIAMMember = new Gcp.Storage.BucketIAMMember("s3-backup-bucket", new()
        {
            Bucket = s3_backup_bucket.Name,
            Role = "roles/storage.admin",
            Member = @default.Apply(result => $"serviceAccount:{result.Email}"),
        });
    
        var topic = new Gcp.PubSub.Topic("topic", new()
        {
            Name = pubsubTopicName,
        });
    
        // Allow the Storage Transfer service account to publish job notifications.
        var notificationConfig = new Gcp.PubSub.TopicIAMMember("notification_config", new()
        {
            Topic = topic.Id,
            Role = "roles/pubsub.publisher",
            Member = @default.Apply(result => $"serviceAccount:{result.Email}"),
        });
    
        // Weekly transfer job (RepeatInterval 604800s) copying the S3 bucket into
        // the GCS sink, starting 2018-10-01 at 23:30 and ending 2019-01-15.
        var s3_bucket_nightly_backup = new Gcp.Storage.TransferJob("s3-bucket-nightly-backup", new()
        {
            Description = "Nightly backup of S3 bucket",
            Project = project,
            TransferSpec = new Gcp.Storage.Inputs.TransferJobTransferSpecArgs
            {
                ObjectConditions = new Gcp.Storage.Inputs.TransferJobTransferSpecObjectConditionsArgs
                {
                    MaxTimeElapsedSinceLastModification = "600s",
                    ExcludePrefixes = new[]
                    {
                        "requests.gz",
                    },
                },
                TransferOptions = new Gcp.Storage.Inputs.TransferJobTransferSpecTransferOptionsArgs
                {
                    DeleteObjectsUniqueInSink = false,
                },
                AwsS3DataSource = new Gcp.Storage.Inputs.TransferJobTransferSpecAwsS3DataSourceArgs
                {
                    BucketName = awsS3Bucket,
                    AwsAccessKey = new Gcp.Storage.Inputs.TransferJobTransferSpecAwsS3DataSourceAwsAccessKeyArgs
                    {
                        AccessKeyId = awsAccessKey,
                        SecretAccessKey = awsSecretKey,
                    },
                },
                GcsDataSink = new Gcp.Storage.Inputs.TransferJobTransferSpecGcsDataSinkArgs
                {
                    BucketName = s3_backup_bucket.Name,
                    Path = "foo/bar/",
                },
            },
            Schedule = new Gcp.Storage.Inputs.TransferJobScheduleArgs
            {
                ScheduleStartDate = new Gcp.Storage.Inputs.TransferJobScheduleScheduleStartDateArgs
                {
                    Year = 2018,
                    Month = 10,
                    Day = 1,
                },
                ScheduleEndDate = new Gcp.Storage.Inputs.TransferJobScheduleScheduleEndDateArgs
                {
                    Year = 2019,
                    Month = 1,
                    Day = 15,
                },
                StartTimeOfDay = new Gcp.Storage.Inputs.TransferJobScheduleStartTimeOfDayArgs
                {
                    Hours = 23,
                    Minutes = 30,
                    Seconds = 0,
                    Nanos = 0,
                },
                RepeatInterval = "604800s",
            },
            NotificationConfig = new Gcp.Storage.Inputs.TransferJobNotificationConfigArgs
            {
                PubsubTopic = topic.Id,
                EventTypes = new[]
                {
                    "TRANSFER_OPERATION_SUCCESS",
                    "TRANSFER_OPERATION_FAILED",
                },
                PayloadFormat = "JSON",
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.gcp.storage.StorageFunctions;
    import com.pulumi.gcp.storage.inputs.GetTransferProjectServiceAccountArgs;
    import com.pulumi.gcp.storage.Bucket;
    import com.pulumi.gcp.storage.BucketArgs;
    import com.pulumi.gcp.storage.BucketIAMMember;
    import com.pulumi.gcp.storage.BucketIAMMemberArgs;
    import com.pulumi.gcp.pubsub.Topic;
    import com.pulumi.gcp.pubsub.TopicArgs;
    import com.pulumi.gcp.pubsub.TopicIAMMember;
    import com.pulumi.gcp.pubsub.TopicIAMMemberArgs;
    import com.pulumi.gcp.storage.TransferJob;
    import com.pulumi.gcp.storage.TransferJobArgs;
    import com.pulumi.gcp.storage.inputs.TransferJobTransferSpecArgs;
    import com.pulumi.gcp.storage.inputs.TransferJobTransferSpecObjectConditionsArgs;
    import com.pulumi.gcp.storage.inputs.TransferJobTransferSpecTransferOptionsArgs;
    import com.pulumi.gcp.storage.inputs.TransferJobTransferSpecAwsS3DataSourceArgs;
    import com.pulumi.gcp.storage.inputs.TransferJobTransferSpecAwsS3DataSourceAwsAccessKeyArgs;
    import com.pulumi.gcp.storage.inputs.TransferJobTransferSpecGcsDataSinkArgs;
    import com.pulumi.gcp.storage.inputs.TransferJobScheduleArgs;
    import com.pulumi.gcp.storage.inputs.TransferJobScheduleScheduleStartDateArgs;
    import com.pulumi.gcp.storage.inputs.TransferJobScheduleScheduleEndDateArgs;
    import com.pulumi.gcp.storage.inputs.TransferJobScheduleStartTimeOfDayArgs;
    import com.pulumi.gcp.storage.inputs.TransferJobNotificationConfigArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            final var default = StorageFunctions.getTransferProjectServiceAccount(GetTransferProjectServiceAccountArgs.builder()
                .project(project)
                .build());
    
            var s3_backup_bucket = new Bucket("s3-backup-bucket", BucketArgs.builder()        
                .name(String.format("%s-backup", awsS3Bucket))
                .storageClass("NEARLINE")
                .project(project)
                .location("US")
                .build());
    
            var s3_backup_bucketBucketIAMMember = new BucketIAMMember("s3-backup-bucketBucketIAMMember", BucketIAMMemberArgs.builder()        
                .bucket(s3_backup_bucket.name())
                .role("roles/storage.admin")
                .member(String.format("serviceAccount:%s", default_.email()))
                .build());
    
            var topic = new Topic("topic", TopicArgs.builder()        
                .name(pubsubTopicName)
                .build());
    
            var notificationConfig = new TopicIAMMember("notificationConfig", TopicIAMMemberArgs.builder()        
                .topic(topic.id())
                .role("roles/pubsub.publisher")
                .member(String.format("serviceAccount:%s", default_.email()))
                .build());
    
            var s3_bucket_nightly_backup = new TransferJob("s3-bucket-nightly-backup", TransferJobArgs.builder()        
                .description("Nightly backup of S3 bucket")
                .project(project)
                .transferSpec(TransferJobTransferSpecArgs.builder()
                    .objectConditions(TransferJobTransferSpecObjectConditionsArgs.builder()
                        .maxTimeElapsedSinceLastModification("600s")
                        .excludePrefixes("requests.gz")
                        .build())
                    .transferOptions(TransferJobTransferSpecTransferOptionsArgs.builder()
                        .deleteObjectsUniqueInSink(false)
                        .build())
                    .awsS3DataSource(TransferJobTransferSpecAwsS3DataSourceArgs.builder()
                        .bucketName(awsS3Bucket)
                        .awsAccessKey(TransferJobTransferSpecAwsS3DataSourceAwsAccessKeyArgs.builder()
                            .accessKeyId(awsAccessKey)
                            .secretAccessKey(awsSecretKey)
                            .build())
                        .build())
                    .gcsDataSink(TransferJobTransferSpecGcsDataSinkArgs.builder()
                        .bucketName(s3_backup_bucket.name())
                        .path("foo/bar/")
                        .build())
                    .build())
                .schedule(TransferJobScheduleArgs.builder()
                    .scheduleStartDate(TransferJobScheduleScheduleStartDateArgs.builder()
                        .year(2018)
                        .month(10)
                        .day(1)
                        .build())
                    .scheduleEndDate(TransferJobScheduleScheduleEndDateArgs.builder()
                        .year(2019)
                        .month(1)
                        .day(15)
                        .build())
                    .startTimeOfDay(TransferJobScheduleStartTimeOfDayArgs.builder()
                        .hours(23)
                        .minutes(30)
                        .seconds(0)
                        .nanos(0)
                        .build())
                    .repeatInterval("604800s")
                    .build())
                .notificationConfig(TransferJobNotificationConfigArgs.builder()
                    .pubsubTopic(topic.id())
                    .eventTypes(                
                        "TRANSFER_OPERATION_SUCCESS",
                        "TRANSFER_OPERATION_FAILED")
                    .payloadFormat("JSON")
                    .build())
                .build());
    
        }
    }
    
    resources:
      # GCS bucket that receives the nightly backup of the S3 bucket.
      s3-backup-bucket:
        type: gcp:storage:Bucket
        properties:
          name: ${awsS3Bucket}-backup
          storageClass: NEARLINE
          project: ${project}
          location: US
      # Grant the Storage Transfer service account admin access to the sink bucket.
      s3-backup-bucketBucketIAMMember:
        type: gcp:storage:BucketIAMMember
        name: s3-backup-bucket
        properties:
          bucket: ${["s3-backup-bucket"].name}
          role: roles/storage.admin
          member: serviceAccount:${default.email}
      topic:
        type: gcp:pubsub:Topic
        properties:
          name: ${pubsubTopicName}
      # Allow the Storage Transfer service account to publish job notifications.
      notificationConfig:
        type: gcp:pubsub:TopicIAMMember
        name: notification_config
        properties:
          topic: ${topic.id}
          role: roles/pubsub.publisher
          member: serviceAccount:${default.email}
      # Weekly transfer job (repeatInterval 604800s) copying the S3 bucket into the
      # GCS sink, starting 2018-10-01 at 23:30 and ending 2019-01-15.
      s3-bucket-nightly-backup:
        type: gcp:storage:TransferJob
        properties:
          description: Nightly backup of S3 bucket
          project: ${project}
          transferSpec:
            objectConditions:
              maxTimeElapsedSinceLastModification: 600s
              excludePrefixes:
                - requests.gz
            transferOptions:
              deleteObjectsUniqueInSink: false
            awsS3DataSource:
              bucketName: ${awsS3Bucket}
              awsAccessKey:
                accessKeyId: ${awsAccessKey}
                secretAccessKey: ${awsSecretKey}
            gcsDataSink:
              bucketName: ${["s3-backup-bucket"].name}
              path: foo/bar/
          schedule:
            scheduleStartDate:
              year: 2018
              month: 10
              day: 1
            scheduleEndDate:
              year: 2019
              month: 1
              day: 15
            startTimeOfDay:
              hours: 23
              minutes: 30
              seconds: 0
              nanos: 0
            repeatInterval: 604800s
          notificationConfig:
            pubsubTopic: ${topic.id}
            eventTypes:
              - TRANSFER_OPERATION_SUCCESS
              - TRANSFER_OPERATION_FAILED
            payloadFormat: JSON
    variables:
      # Look up the project's Storage Transfer service account; its email is used
      # in the IAM bindings above.
      default:
        fn::invoke:
          Function: gcp:storage:getTransferProjectServiceAccount
          Arguments:
            project: ${project}
    

    Create TransferJob Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new TransferJob(name: string, args: TransferJobArgs, opts?: CustomResourceOptions);
    @overload
    def TransferJob(resource_name: str,
                    args: TransferJobArgs,
                    opts: Optional[ResourceOptions] = None)
    
    @overload
    def TransferJob(resource_name: str,
                    opts: Optional[ResourceOptions] = None,
                    description: Optional[str] = None,
                    transfer_spec: Optional[TransferJobTransferSpecArgs] = None,
                    event_stream: Optional[TransferJobEventStreamArgs] = None,
                    name: Optional[str] = None,
                    notification_config: Optional[TransferJobNotificationConfigArgs] = None,
                    project: Optional[str] = None,
                    schedule: Optional[TransferJobScheduleArgs] = None,
                    status: Optional[str] = None)
    func NewTransferJob(ctx *Context, name string, args TransferJobArgs, opts ...ResourceOption) (*TransferJob, error)
    public TransferJob(string name, TransferJobArgs args, CustomResourceOptions? opts = null)
    public TransferJob(String name, TransferJobArgs args)
    public TransferJob(String name, TransferJobArgs args, CustomResourceOptions options)
    
    type: gcp:storage:TransferJob
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args TransferJobArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args TransferJobArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args TransferJobArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args TransferJobArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args TransferJobArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Example

    The following reference example uses placeholder values for all input properties.

    // Reference-only example: every input is a placeholder ("string", 0, false)
    // illustrating the expected type and nesting of each property, not a runnable
    // configuration.
    var transferJobResource = new Gcp.Storage.TransferJob("transferJobResource", new()
    {
        Description = "string",
        TransferSpec = new Gcp.Storage.Inputs.TransferJobTransferSpecArgs
        {
            AwsS3DataSource = new Gcp.Storage.Inputs.TransferJobTransferSpecAwsS3DataSourceArgs
            {
                BucketName = "string",
                AwsAccessKey = new Gcp.Storage.Inputs.TransferJobTransferSpecAwsS3DataSourceAwsAccessKeyArgs
                {
                    AccessKeyId = "string",
                    SecretAccessKey = "string",
                },
                Path = "string",
                RoleArn = "string",
            },
            AzureBlobStorageDataSource = new Gcp.Storage.Inputs.TransferJobTransferSpecAzureBlobStorageDataSourceArgs
            {
                Container = "string",
                StorageAccount = "string",
                AzureCredentials = new Gcp.Storage.Inputs.TransferJobTransferSpecAzureBlobStorageDataSourceAzureCredentialsArgs
                {
                    SasToken = "string",
                },
                CredentialsSecret = "string",
                Path = "string",
            },
            GcsDataSink = new Gcp.Storage.Inputs.TransferJobTransferSpecGcsDataSinkArgs
            {
                BucketName = "string",
                Path = "string",
            },
            GcsDataSource = new Gcp.Storage.Inputs.TransferJobTransferSpecGcsDataSourceArgs
            {
                BucketName = "string",
                Path = "string",
            },
            HttpDataSource = new Gcp.Storage.Inputs.TransferJobTransferSpecHttpDataSourceArgs
            {
                ListUrl = "string",
            },
            ObjectConditions = new Gcp.Storage.Inputs.TransferJobTransferSpecObjectConditionsArgs
            {
                ExcludePrefixes = new[]
                {
                    "string",
                },
                IncludePrefixes = new[]
                {
                    "string",
                },
                LastModifiedBefore = "string",
                LastModifiedSince = "string",
                MaxTimeElapsedSinceLastModification = "string",
                MinTimeElapsedSinceLastModification = "string",
            },
            PosixDataSink = new Gcp.Storage.Inputs.TransferJobTransferSpecPosixDataSinkArgs
            {
                RootDirectory = "string",
            },
            PosixDataSource = new Gcp.Storage.Inputs.TransferJobTransferSpecPosixDataSourceArgs
            {
                RootDirectory = "string",
            },
            SinkAgentPoolName = "string",
            SourceAgentPoolName = "string",
            TransferOptions = new Gcp.Storage.Inputs.TransferJobTransferSpecTransferOptionsArgs
            {
                DeleteObjectsFromSourceAfterTransfer = false,
                DeleteObjectsUniqueInSink = false,
                OverwriteObjectsAlreadyExistingInSink = false,
                OverwriteWhen = "string",
            },
        },
        EventStream = new Gcp.Storage.Inputs.TransferJobEventStreamArgs
        {
            Name = "string",
            EventStreamExpirationTime = "string",
            EventStreamStartTime = "string",
        },
        Name = "string",
        NotificationConfig = new Gcp.Storage.Inputs.TransferJobNotificationConfigArgs
        {
            PayloadFormat = "string",
            PubsubTopic = "string",
            EventTypes = new[]
            {
                "string",
            },
        },
        Project = "string",
        Schedule = new Gcp.Storage.Inputs.TransferJobScheduleArgs
        {
            ScheduleStartDate = new Gcp.Storage.Inputs.TransferJobScheduleScheduleStartDateArgs
            {
                Day = 0,
                Month = 0,
                Year = 0,
            },
            RepeatInterval = "string",
            ScheduleEndDate = new Gcp.Storage.Inputs.TransferJobScheduleScheduleEndDateArgs
            {
                Day = 0,
                Month = 0,
                Year = 0,
            },
            StartTimeOfDay = new Gcp.Storage.Inputs.TransferJobScheduleStartTimeOfDayArgs
            {
                Hours = 0,
                Minutes = 0,
                Nanos = 0,
                Seconds = 0,
            },
        },
        Status = "string",
    });
    
    // Reference-only example: every input is a placeholder ("string", 0, false)
    // illustrating the expected type and nesting of each property, not a runnable
    // configuration.
    example, err := storage.NewTransferJob(ctx, "transferJobResource", &storage.TransferJobArgs{
    	Description: pulumi.String("string"),
    	TransferSpec: &storage.TransferJobTransferSpecArgs{
    		AwsS3DataSource: &storage.TransferJobTransferSpecAwsS3DataSourceArgs{
    			BucketName: pulumi.String("string"),
    			AwsAccessKey: &storage.TransferJobTransferSpecAwsS3DataSourceAwsAccessKeyArgs{
    				AccessKeyId:     pulumi.String("string"),
    				SecretAccessKey: pulumi.String("string"),
    			},
    			Path:    pulumi.String("string"),
    			RoleArn: pulumi.String("string"),
    		},
    		AzureBlobStorageDataSource: &storage.TransferJobTransferSpecAzureBlobStorageDataSourceArgs{
    			Container:      pulumi.String("string"),
    			StorageAccount: pulumi.String("string"),
    			AzureCredentials: &storage.TransferJobTransferSpecAzureBlobStorageDataSourceAzureCredentialsArgs{
    				SasToken: pulumi.String("string"),
    			},
    			CredentialsSecret: pulumi.String("string"),
    			Path:              pulumi.String("string"),
    		},
    		GcsDataSink: &storage.TransferJobTransferSpecGcsDataSinkArgs{
    			BucketName: pulumi.String("string"),
    			Path:       pulumi.String("string"),
    		},
    		GcsDataSource: &storage.TransferJobTransferSpecGcsDataSourceArgs{
    			BucketName: pulumi.String("string"),
    			Path:       pulumi.String("string"),
    		},
    		HttpDataSource: &storage.TransferJobTransferSpecHttpDataSourceArgs{
    			ListUrl: pulumi.String("string"),
    		},
    		ObjectConditions: &storage.TransferJobTransferSpecObjectConditionsArgs{
    			ExcludePrefixes: pulumi.StringArray{
    				pulumi.String("string"),
    			},
    			IncludePrefixes: pulumi.StringArray{
    				pulumi.String("string"),
    			},
    			LastModifiedBefore:                  pulumi.String("string"),
    			LastModifiedSince:                   pulumi.String("string"),
    			MaxTimeElapsedSinceLastModification: pulumi.String("string"),
    			MinTimeElapsedSinceLastModification: pulumi.String("string"),
    		},
    		PosixDataSink: &storage.TransferJobTransferSpecPosixDataSinkArgs{
    			RootDirectory: pulumi.String("string"),
    		},
    		PosixDataSource: &storage.TransferJobTransferSpecPosixDataSourceArgs{
    			RootDirectory: pulumi.String("string"),
    		},
    		SinkAgentPoolName:   pulumi.String("string"),
    		SourceAgentPoolName: pulumi.String("string"),
    		TransferOptions: &storage.TransferJobTransferSpecTransferOptionsArgs{
    			DeleteObjectsFromSourceAfterTransfer:  pulumi.Bool(false),
    			DeleteObjectsUniqueInSink:             pulumi.Bool(false),
    			OverwriteObjectsAlreadyExistingInSink: pulumi.Bool(false),
    			OverwriteWhen:                         pulumi.String("string"),
    		},
    	},
    	EventStream: &storage.TransferJobEventStreamArgs{
    		Name:                      pulumi.String("string"),
    		EventStreamExpirationTime: pulumi.String("string"),
    		EventStreamStartTime:      pulumi.String("string"),
    	},
    	Name: pulumi.String("string"),
    	NotificationConfig: &storage.TransferJobNotificationConfigArgs{
    		PayloadFormat: pulumi.String("string"),
    		PubsubTopic:   pulumi.String("string"),
    		EventTypes: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    	},
    	Project: pulumi.String("string"),
    	Schedule: &storage.TransferJobScheduleArgs{
    		ScheduleStartDate: &storage.TransferJobScheduleScheduleStartDateArgs{
    			Day:   pulumi.Int(0),
    			Month: pulumi.Int(0),
    			Year:  pulumi.Int(0),
    		},
    		RepeatInterval: pulumi.String("string"),
    		ScheduleEndDate: &storage.TransferJobScheduleScheduleEndDateArgs{
    			Day:   pulumi.Int(0),
    			Month: pulumi.Int(0),
    			Year:  pulumi.Int(0),
    		},
    		StartTimeOfDay: &storage.TransferJobScheduleStartTimeOfDayArgs{
    			Hours:   pulumi.Int(0),
    			Minutes: pulumi.Int(0),
    			Nanos:   pulumi.Int(0),
    			Seconds: pulumi.Int(0),
    		},
    	},
    	Status: pulumi.String("string"),
    })
    
    var transferJobResource = new TransferJob("transferJobResource", TransferJobArgs.builder()        
        .description("string")
        .transferSpec(TransferJobTransferSpecArgs.builder()
            .awsS3DataSource(TransferJobTransferSpecAwsS3DataSourceArgs.builder()
                .bucketName("string")
                .awsAccessKey(TransferJobTransferSpecAwsS3DataSourceAwsAccessKeyArgs.builder()
                    .accessKeyId("string")
                    .secretAccessKey("string")
                    .build())
                .path("string")
                .roleArn("string")
                .build())
            .azureBlobStorageDataSource(TransferJobTransferSpecAzureBlobStorageDataSourceArgs.builder()
                .container("string")
                .storageAccount("string")
                .azureCredentials(TransferJobTransferSpecAzureBlobStorageDataSourceAzureCredentialsArgs.builder()
                    .sasToken("string")
                    .build())
                .credentialsSecret("string")
                .path("string")
                .build())
            .gcsDataSink(TransferJobTransferSpecGcsDataSinkArgs.builder()
                .bucketName("string")
                .path("string")
                .build())
            .gcsDataSource(TransferJobTransferSpecGcsDataSourceArgs.builder()
                .bucketName("string")
                .path("string")
                .build())
            .httpDataSource(TransferJobTransferSpecHttpDataSourceArgs.builder()
                .listUrl("string")
                .build())
            .objectConditions(TransferJobTransferSpecObjectConditionsArgs.builder()
                .excludePrefixes("string")
                .includePrefixes("string")
                .lastModifiedBefore("string")
                .lastModifiedSince("string")
                .maxTimeElapsedSinceLastModification("string")
                .minTimeElapsedSinceLastModification("string")
                .build())
            .posixDataSink(TransferJobTransferSpecPosixDataSinkArgs.builder()
                .rootDirectory("string")
                .build())
            .posixDataSource(TransferJobTransferSpecPosixDataSourceArgs.builder()
                .rootDirectory("string")
                .build())
            .sinkAgentPoolName("string")
            .sourceAgentPoolName("string")
            .transferOptions(TransferJobTransferSpecTransferOptionsArgs.builder()
                .deleteObjectsFromSourceAfterTransfer(false)
                .deleteObjectsUniqueInSink(false)
                .overwriteObjectsAlreadyExistingInSink(false)
                .overwriteWhen("string")
                .build())
            .build())
        .eventStream(TransferJobEventStreamArgs.builder()
            .name("string")
            .eventStreamExpirationTime("string")
            .eventStreamStartTime("string")
            .build())
        .name("string")
        .notificationConfig(TransferJobNotificationConfigArgs.builder()
            .payloadFormat("string")
            .pubsubTopic("string")
            .eventTypes("string")
            .build())
        .project("string")
        .schedule(TransferJobScheduleArgs.builder()
            .scheduleStartDate(TransferJobScheduleScheduleStartDateArgs.builder()
                .day(0)
                .month(0)
                .year(0)
                .build())
            .repeatInterval("string")
            .scheduleEndDate(TransferJobScheduleScheduleEndDateArgs.builder()
                .day(0)
                .month(0)
                .year(0)
                .build())
            .startTimeOfDay(TransferJobScheduleStartTimeOfDayArgs.builder()
                .hours(0)
                .minutes(0)
                .nanos(0)
                .seconds(0)
                .build())
            .build())
        .status("string")
        .build());
    
    transfer_job_resource = gcp.storage.TransferJob("transferJobResource",
        description="string",
        transfer_spec=gcp.storage.TransferJobTransferSpecArgs(
            aws_s3_data_source=gcp.storage.TransferJobTransferSpecAwsS3DataSourceArgs(
                bucket_name="string",
                aws_access_key=gcp.storage.TransferJobTransferSpecAwsS3DataSourceAwsAccessKeyArgs(
                    access_key_id="string",
                    secret_access_key="string",
                ),
                path="string",
                role_arn="string",
            ),
            azure_blob_storage_data_source=gcp.storage.TransferJobTransferSpecAzureBlobStorageDataSourceArgs(
                container="string",
                storage_account="string",
                azure_credentials=gcp.storage.TransferJobTransferSpecAzureBlobStorageDataSourceAzureCredentialsArgs(
                    sas_token="string",
                ),
                credentials_secret="string",
                path="string",
            ),
            gcs_data_sink=gcp.storage.TransferJobTransferSpecGcsDataSinkArgs(
                bucket_name="string",
                path="string",
            ),
            gcs_data_source=gcp.storage.TransferJobTransferSpecGcsDataSourceArgs(
                bucket_name="string",
                path="string",
            ),
            http_data_source=gcp.storage.TransferJobTransferSpecHttpDataSourceArgs(
                list_url="string",
            ),
            object_conditions=gcp.storage.TransferJobTransferSpecObjectConditionsArgs(
                exclude_prefixes=["string"],
                include_prefixes=["string"],
                last_modified_before="string",
                last_modified_since="string",
                max_time_elapsed_since_last_modification="string",
                min_time_elapsed_since_last_modification="string",
            ),
            posix_data_sink=gcp.storage.TransferJobTransferSpecPosixDataSinkArgs(
                root_directory="string",
            ),
            posix_data_source=gcp.storage.TransferJobTransferSpecPosixDataSourceArgs(
                root_directory="string",
            ),
            sink_agent_pool_name="string",
            source_agent_pool_name="string",
            transfer_options=gcp.storage.TransferJobTransferSpecTransferOptionsArgs(
                delete_objects_from_source_after_transfer=False,
                delete_objects_unique_in_sink=False,
                overwrite_objects_already_existing_in_sink=False,
                overwrite_when="string",
            ),
        ),
        event_stream=gcp.storage.TransferJobEventStreamArgs(
            name="string",
            event_stream_expiration_time="string",
            event_stream_start_time="string",
        ),
        name="string",
        notification_config=gcp.storage.TransferJobNotificationConfigArgs(
            payload_format="string",
            pubsub_topic="string",
            event_types=["string"],
        ),
        project="string",
        schedule=gcp.storage.TransferJobScheduleArgs(
            schedule_start_date=gcp.storage.TransferJobScheduleScheduleStartDateArgs(
                day=0,
                month=0,
                year=0,
            ),
            repeat_interval="string",
            schedule_end_date=gcp.storage.TransferJobScheduleScheduleEndDateArgs(
                day=0,
                month=0,
                year=0,
            ),
            start_time_of_day=gcp.storage.TransferJobScheduleStartTimeOfDayArgs(
                hours=0,
                minutes=0,
                nanos=0,
                seconds=0,
            ),
        ),
        status="string")
    
    const transferJobResource = new gcp.storage.TransferJob("transferJobResource", {
        description: "string",
        transferSpec: {
            awsS3DataSource: {
                bucketName: "string",
                awsAccessKey: {
                    accessKeyId: "string",
                    secretAccessKey: "string",
                },
                path: "string",
                roleArn: "string",
            },
            azureBlobStorageDataSource: {
                container: "string",
                storageAccount: "string",
                azureCredentials: {
                    sasToken: "string",
                },
                credentialsSecret: "string",
                path: "string",
            },
            gcsDataSink: {
                bucketName: "string",
                path: "string",
            },
            gcsDataSource: {
                bucketName: "string",
                path: "string",
            },
            httpDataSource: {
                listUrl: "string",
            },
            objectConditions: {
                excludePrefixes: ["string"],
                includePrefixes: ["string"],
                lastModifiedBefore: "string",
                lastModifiedSince: "string",
                maxTimeElapsedSinceLastModification: "string",
                minTimeElapsedSinceLastModification: "string",
            },
            posixDataSink: {
                rootDirectory: "string",
            },
            posixDataSource: {
                rootDirectory: "string",
            },
            sinkAgentPoolName: "string",
            sourceAgentPoolName: "string",
            transferOptions: {
                deleteObjectsFromSourceAfterTransfer: false,
                deleteObjectsUniqueInSink: false,
                overwriteObjectsAlreadyExistingInSink: false,
                overwriteWhen: "string",
            },
        },
        eventStream: {
            name: "string",
            eventStreamExpirationTime: "string",
            eventStreamStartTime: "string",
        },
        name: "string",
        notificationConfig: {
            payloadFormat: "string",
            pubsubTopic: "string",
            eventTypes: ["string"],
        },
        project: "string",
        schedule: {
            scheduleStartDate: {
                day: 0,
                month: 0,
                year: 0,
            },
            repeatInterval: "string",
            scheduleEndDate: {
                day: 0,
                month: 0,
                year: 0,
            },
            startTimeOfDay: {
                hours: 0,
                minutes: 0,
                nanos: 0,
                seconds: 0,
            },
        },
        status: "string",
    });
    
    type: gcp:storage:TransferJob
    properties:
        description: string
        eventStream:
            eventStreamExpirationTime: string
            eventStreamStartTime: string
            name: string
        name: string
        notificationConfig:
            eventTypes:
                - string
            payloadFormat: string
            pubsubTopic: string
        project: string
        schedule:
            repeatInterval: string
            scheduleEndDate:
                day: 0
                month: 0
                year: 0
            scheduleStartDate:
                day: 0
                month: 0
                year: 0
            startTimeOfDay:
                hours: 0
                minutes: 0
                nanos: 0
                seconds: 0
        status: string
        transferSpec:
            awsS3DataSource:
                awsAccessKey:
                    accessKeyId: string
                    secretAccessKey: string
                bucketName: string
                path: string
                roleArn: string
            azureBlobStorageDataSource:
                azureCredentials:
                    sasToken: string
                container: string
                credentialsSecret: string
                path: string
                storageAccount: string
            gcsDataSink:
                bucketName: string
                path: string
            gcsDataSource:
                bucketName: string
                path: string
            httpDataSource:
                listUrl: string
            objectConditions:
                excludePrefixes:
                    - string
                includePrefixes:
                    - string
                lastModifiedBefore: string
                lastModifiedSince: string
                maxTimeElapsedSinceLastModification: string
                minTimeElapsedSinceLastModification: string
            posixDataSink:
                rootDirectory: string
            posixDataSource:
                rootDirectory: string
            sinkAgentPoolName: string
            sourceAgentPoolName: string
            transferOptions:
                deleteObjectsFromSourceAfterTransfer: false
                deleteObjectsUniqueInSink: false
                overwriteObjectsAlreadyExistingInSink: false
                overwriteWhen: string
    

    TransferJob Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    The TransferJob resource accepts the following input properties:

    Description string
    Unique description to identify the Transfer Job.
    TransferSpec TransferJobTransferSpec
    Transfer specification. Structure documented below.


    EventStream TransferJobEventStream
    Specifies the Event-driven transfer options. Event-driven transfers listen to an event stream to transfer updated files. Structure documented below. Either event_stream or schedule must be set.
    Name string
    The name of the Transfer Job. This name must start with the "transferJobs/" prefix and end with a letter or a number, and should be no more than 128 characters ( transferJobs/^(?!OPI)[A-Za-z0-9-._~]*[A-Za-z0-9]$ ). For transfers involving PosixFilesystem, this name must start with transferJobs/OPI specifically ( transferJobs/OPI^[A-Za-z0-9-._~]*[A-Za-z0-9]$ ). For all other transfer types, this name must not start with transferJobs/OPI. By default, the provider will assign a random unique name in the transferJobs/{{name}} format, where name is a numeric value.
    NotificationConfig TransferJobNotificationConfig
    Notification configuration. This is not supported for transfers involving PosixFilesystem. Structure documented below.
    Project string
    The project in which the resource belongs. If it is not provided, the provider project is used.
    Schedule TransferJobSchedule
    Schedule specification defining when the Transfer Job should be scheduled to start, end and what time to run. Structure documented below. Either schedule or event_stream must be set.
    Status string
    Status of the job. Default: ENABLED. NOTE: The effect of the new job status takes place during a subsequent job run. For example, if you change the job status from ENABLED to DISABLED, and an operation spawned by the transfer is running, the status change would not affect the current operation.
    Description string
    Unique description to identify the Transfer Job.
    TransferSpec TransferJobTransferSpecArgs
    Transfer specification. Structure documented below.


    EventStream TransferJobEventStreamArgs
    Specifies the Event-driven transfer options. Event-driven transfers listen to an event stream to transfer updated files. Structure documented below. Either event_stream or schedule must be set.
    Name string
    The name of the Transfer Job. This name must start with the "transferJobs/" prefix and end with a letter or a number, and should be no more than 128 characters ( transferJobs/^(?!OPI)[A-Za-z0-9-._~]*[A-Za-z0-9]$ ). For transfers involving PosixFilesystem, this name must start with transferJobs/OPI specifically ( transferJobs/OPI^[A-Za-z0-9-._~]*[A-Za-z0-9]$ ). For all other transfer types, this name must not start with transferJobs/OPI. By default, the provider will assign a random unique name in the transferJobs/{{name}} format, where name is a numeric value.
    NotificationConfig TransferJobNotificationConfigArgs
    Notification configuration. This is not supported for transfers involving PosixFilesystem. Structure documented below.
    Project string
    The project in which the resource belongs. If it is not provided, the provider project is used.
    Schedule TransferJobScheduleArgs
    Schedule specification defining when the Transfer Job should be scheduled to start, end and what time to run. Structure documented below. Either schedule or event_stream must be set.
    Status string
    Status of the job. Default: ENABLED. NOTE: The effect of the new job status takes place during a subsequent job run. For example, if you change the job status from ENABLED to DISABLED, and an operation spawned by the transfer is running, the status change would not affect the current operation.
    description String
    Unique description to identify the Transfer Job.
    transferSpec TransferJobTransferSpec
    Transfer specification. Structure documented below.


    eventStream TransferJobEventStream
    Specifies the Event-driven transfer options. Event-driven transfers listen to an event stream to transfer updated files. Structure documented below. Either event_stream or schedule must be set.
    name String
    The name of the Transfer Job. This name must start with the "transferJobs/" prefix and end with a letter or a number, and should be no more than 128 characters ( transferJobs/^(?!OPI)[A-Za-z0-9-._~]*[A-Za-z0-9]$ ). For transfers involving PosixFilesystem, this name must start with transferJobs/OPI specifically ( transferJobs/OPI^[A-Za-z0-9-._~]*[A-Za-z0-9]$ ). For all other transfer types, this name must not start with transferJobs/OPI. By default, the provider will assign a random unique name in the transferJobs/{{name}} format, where name is a numeric value.
    notificationConfig TransferJobNotificationConfig
    Notification configuration. This is not supported for transfers involving PosixFilesystem. Structure documented below.
    project String
    The project in which the resource belongs. If it is not provided, the provider project is used.
    schedule TransferJobSchedule
    Schedule specification defining when the Transfer Job should be scheduled to start, end and what time to run. Structure documented below. Either schedule or event_stream must be set.
    status String
    Status of the job. Default: ENABLED. NOTE: The effect of the new job status takes place during a subsequent job run. For example, if you change the job status from ENABLED to DISABLED, and an operation spawned by the transfer is running, the status change would not affect the current operation.
    description string
    Unique description to identify the Transfer Job.
    transferSpec TransferJobTransferSpec
    Transfer specification. Structure documented below.


    eventStream TransferJobEventStream
    Specifies the Event-driven transfer options. Event-driven transfers listen to an event stream to transfer updated files. Structure documented below. Either event_stream or schedule must be set.
    name string
    The name of the Transfer Job. This name must start with the "transferJobs/" prefix and end with a letter or a number, and should be no more than 128 characters ( transferJobs/^(?!OPI)[A-Za-z0-9-._~]*[A-Za-z0-9]$ ). For transfers involving PosixFilesystem, this name must start with transferJobs/OPI specifically ( transferJobs/OPI^[A-Za-z0-9-._~]*[A-Za-z0-9]$ ). For all other transfer types, this name must not start with transferJobs/OPI. By default, the provider will assign a random unique name in the transferJobs/{{name}} format, where name is a numeric value.
    notificationConfig TransferJobNotificationConfig
    Notification configuration. This is not supported for transfers involving PosixFilesystem. Structure documented below.
    project string
    The project in which the resource belongs. If it is not provided, the provider project is used.
    schedule TransferJobSchedule
    Schedule specification defining when the Transfer Job should be scheduled to start, end and what time to run. Structure documented below. Either schedule or event_stream must be set.
    status string
    Status of the job. Default: ENABLED. NOTE: The effect of the new job status takes place during a subsequent job run. For example, if you change the job status from ENABLED to DISABLED, and an operation spawned by the transfer is running, the status change would not affect the current operation.
    description str
    Unique description to identify the Transfer Job.
    transfer_spec TransferJobTransferSpecArgs
    Transfer specification. Structure documented below.


    event_stream TransferJobEventStreamArgs
    Specifies the Event-driven transfer options. Event-driven transfers listen to an event stream to transfer updated files. Structure documented below. Either event_stream or schedule must be set.
    name str
    The name of the Transfer Job. This name must start with the "transferJobs/" prefix and end with a letter or a number, and should be no more than 128 characters ( transferJobs/^(?!OPI)[A-Za-z0-9-._~]*[A-Za-z0-9]$ ). For transfers involving PosixFilesystem, this name must start with transferJobs/OPI specifically ( transferJobs/OPI^[A-Za-z0-9-._~]*[A-Za-z0-9]$ ). For all other transfer types, this name must not start with transferJobs/OPI. By default, the provider will assign a random unique name in the transferJobs/{{name}} format, where name is a numeric value.
    notification_config TransferJobNotificationConfigArgs
    Notification configuration. This is not supported for transfers involving PosixFilesystem. Structure documented below.
    project str
    The project in which the resource belongs. If it is not provided, the provider project is used.
    schedule TransferJobScheduleArgs
    Schedule specification defining when the Transfer Job should be scheduled to start, end and what time to run. Structure documented below. Either schedule or event_stream must be set.
    status str
    Status of the job. Default: ENABLED. NOTE: The effect of the new job status takes place during a subsequent job run. For example, if you change the job status from ENABLED to DISABLED, and an operation spawned by the transfer is running, the status change would not affect the current operation.
    description String
    Unique description to identify the Transfer Job.
    transferSpec Property Map
    Transfer specification. Structure documented below.


    eventStream Property Map
    Specifies the Event-driven transfer options. Event-driven transfers listen to an event stream to transfer updated files. Structure documented below. Either event_stream or schedule must be set.
    name String
    The name of the Transfer Job. This name must start with the "transferJobs/" prefix and end with a letter or a number, and should be no more than 128 characters ( transferJobs/^(?!OPI)[A-Za-z0-9-._~]*[A-Za-z0-9]$ ). For transfers involving PosixFilesystem, this name must start with transferJobs/OPI specifically ( transferJobs/OPI^[A-Za-z0-9-._~]*[A-Za-z0-9]$ ). For all other transfer types, this name must not start with transferJobs/OPI. By default, the provider will assign a random unique name in the transferJobs/{{name}} format, where name is a numeric value.
    notificationConfig Property Map
    Notification configuration. This is not supported for transfers involving PosixFilesystem. Structure documented below.
    project String
    The project in which the resource belongs. If it is not provided, the provider project is used.
    schedule Property Map
    Schedule specification defining when the Transfer Job should be scheduled to start, end and what time to run. Structure documented below. Either schedule or event_stream must be set.
    status String
    Status of the job. Default: ENABLED. NOTE: The effect of the new job status takes place during a subsequent job run. For example, if you change the job status from ENABLED to DISABLED, and an operation spawned by the transfer is running, the status change would not affect the current operation.

    Outputs

    All input properties are implicitly available as output properties. Additionally, the TransferJob resource produces the following output properties:

    CreationTime string
    When the Transfer Job was created.
    DeletionTime string
    When the Transfer Job was deleted.
    Id string
    The provider-assigned unique ID for this managed resource.
    LastModificationTime string
    When the Transfer Job was last modified.
    CreationTime string
    When the Transfer Job was created.
    DeletionTime string
    When the Transfer Job was deleted.
    Id string
    The provider-assigned unique ID for this managed resource.
    LastModificationTime string
    When the Transfer Job was last modified.
    creationTime String
    When the Transfer Job was created.
    deletionTime String
    When the Transfer Job was deleted.
    id String
    The provider-assigned unique ID for this managed resource.
    lastModificationTime String
    When the Transfer Job was last modified.
    creationTime string
    When the Transfer Job was created.
    deletionTime string
    When the Transfer Job was deleted.
    id string
    The provider-assigned unique ID for this managed resource.
    lastModificationTime string
    When the Transfer Job was last modified.
    creation_time str
    When the Transfer Job was created.
    deletion_time str
    When the Transfer Job was deleted.
    id str
    The provider-assigned unique ID for this managed resource.
    last_modification_time str
    When the Transfer Job was last modified.
    creationTime String
    When the Transfer Job was created.
    deletionTime String
    When the Transfer Job was deleted.
    id String
    The provider-assigned unique ID for this managed resource.
    lastModificationTime String
    When the Transfer Job was last modified.

    Look up Existing TransferJob Resource

    Get an existing TransferJob resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: TransferJobState, opts?: CustomResourceOptions): TransferJob
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            creation_time: Optional[str] = None,
            deletion_time: Optional[str] = None,
            description: Optional[str] = None,
            event_stream: Optional[TransferJobEventStreamArgs] = None,
            last_modification_time: Optional[str] = None,
            name: Optional[str] = None,
            notification_config: Optional[TransferJobNotificationConfigArgs] = None,
            project: Optional[str] = None,
            schedule: Optional[TransferJobScheduleArgs] = None,
            status: Optional[str] = None,
            transfer_spec: Optional[TransferJobTransferSpecArgs] = None) -> TransferJob
    func GetTransferJob(ctx *Context, name string, id IDInput, state *TransferJobState, opts ...ResourceOption) (*TransferJob, error)
    public static TransferJob Get(string name, Input<string> id, TransferJobState? state, CustomResourceOptions? opts = null)
    public static TransferJob get(String name, Output<String> id, TransferJobState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    CreationTime string
    When the Transfer Job was created.
    DeletionTime string
    When the Transfer Job was deleted.
    Description string
    Unique description to identify the Transfer Job.
    EventStream TransferJobEventStream
    Specifies the Event-driven transfer options. Event-driven transfers listen to an event stream to transfer updated files. Structure documented below. Either event_stream or schedule must be set.
    LastModificationTime string
    When the Transfer Job was last modified.
    Name string
    The name of the Transfer Job. This name must start with the "transferJobs/" prefix and end with a letter or a number, and should be no more than 128 characters ( transferJobs/^(?!OPI)[A-Za-z0-9-._~]*[A-Za-z0-9]$ ). For transfers involving PosixFilesystem, this name must start with transferJobs/OPI specifically ( transferJobs/OPI^[A-Za-z0-9-._~]*[A-Za-z0-9]$ ). For all other transfer types, this name must not start with transferJobs/OPI. By default, the provider will assign a random unique name in the transferJobs/{{name}} format, where name is a numeric value.
    NotificationConfig TransferJobNotificationConfig
    Notification configuration. This is not supported for transfers involving PosixFilesystem. Structure documented below.
    Project string
    The project in which the resource belongs. If it is not provided, the provider project is used.
    Schedule TransferJobSchedule
    Schedule specification defining when the Transfer Job should be scheduled to start, end and what time to run. Structure documented below. Either schedule or event_stream must be set.
    Status string
    Status of the job. Default: ENABLED. NOTE: The effect of the new job status takes place during a subsequent job run. For example, if you change the job status from ENABLED to DISABLED, and an operation spawned by the transfer is running, the status change would not affect the current operation.
    TransferSpec TransferJobTransferSpec
    Transfer specification. Structure documented below.


    CreationTime string
    When the Transfer Job was created.
    DeletionTime string
    When the Transfer Job was deleted.
    Description string
    Unique description to identify the Transfer Job.
    EventStream TransferJobEventStreamArgs
    Specifies the Event-driven transfer options. Event-driven transfers listen to an event stream to transfer updated files. Structure documented below. Either event_stream or schedule must be set.
    LastModificationTime string
    When the Transfer Job was last modified.
    Name string
    The name of the Transfer Job. This name must start with "transferJobs/" prefix and end with a letter or a number, and should be no more than 128 characters ( transferJobs/^(?!OPI)[A-Za-z0-9-._~]*[A-Za-z0-9]$ ). For transfers involving PosixFilesystem, this name must start with transferJobs/OPI specifically ( transferJobs/OPI^[A-Za-z0-9-._~]*[A-Za-z0-9]$ ). For all other transfer types, this name must not start with transferJobs/OPI. By default, the provider will assign a random unique name in the transferJobs/{{name}} format, where name is a numeric value.
    NotificationConfig TransferJobNotificationConfigArgs
    Notification configuration. This is not supported for transfers involving PosixFilesystem. Structure documented below.
    Project string
    The project in which the resource belongs. If it is not provided, the provider project is used.
    Schedule TransferJobScheduleArgs
    Schedule specification defining when the Transfer Job should be scheduled to start, end and what time to run. Structure documented below. Either schedule or event_stream must be set.
    Status string
    Status of the job. Default: ENABLED. NOTE: The effect of the new job status takes place during a subsequent job run. For example, if you change the job status from ENABLED to DISABLED, and an operation spawned by the transfer is running, the status change would not affect the current operation.
    TransferSpec TransferJobTransferSpecArgs
    Transfer specification. Structure documented below.


    creationTime String
    When the Transfer Job was created.
    deletionTime String
    When the Transfer Job was deleted.
    description String
    Unique description to identify the Transfer Job.
    eventStream TransferJobEventStream
    Specifies the Event-driven transfer options. Event-driven transfers listen to an event stream to transfer updated files. Structure documented below. Either event_stream or schedule must be set.
    lastModificationTime String
    When the Transfer Job was last modified.
    name String
    The name of the Transfer Job. This name must start with "transferJobs/" prefix and end with a letter or a number, and should be no more than 128 characters ( transferJobs/^(?!OPI)[A-Za-z0-9-._~]*[A-Za-z0-9]$ ). For transfers involving PosixFilesystem, this name must start with transferJobs/OPI specifically ( transferJobs/OPI^[A-Za-z0-9-._~]*[A-Za-z0-9]$ ). For all other transfer types, this name must not start with transferJobs/OPI. By default, the provider will assign a random unique name in the transferJobs/{{name}} format, where name is a numeric value.
    notificationConfig TransferJobNotificationConfig
    Notification configuration. This is not supported for transfers involving PosixFilesystem. Structure documented below.
    project String
    The project in which the resource belongs. If it is not provided, the provider project is used.
    schedule TransferJobSchedule
    Schedule specification defining when the Transfer Job should be scheduled to start, end and what time to run. Structure documented below. Either schedule or event_stream must be set.
    status String
    Status of the job. Default: ENABLED. NOTE: The effect of the new job status takes place during a subsequent job run. For example, if you change the job status from ENABLED to DISABLED, and an operation spawned by the transfer is running, the status change would not affect the current operation.
    transferSpec TransferJobTransferSpec
    Transfer specification. Structure documented below.


    creationTime string
    When the Transfer Job was created.
    deletionTime string
    When the Transfer Job was deleted.
    description string
    Unique description to identify the Transfer Job.
    eventStream TransferJobEventStream
    Specifies the Event-driven transfer options. Event-driven transfers listen to an event stream to transfer updated files. Structure documented below. Either event_stream or schedule must be set.
    lastModificationTime string
    When the Transfer Job was last modified.
    name string
    The name of the Transfer Job. This name must start with "transferJobs/" prefix and end with a letter or a number, and should be no more than 128 characters ( transferJobs/^(?!OPI)[A-Za-z0-9-._~]*[A-Za-z0-9]$ ). For transfers involving PosixFilesystem, this name must start with transferJobs/OPI specifically ( transferJobs/OPI^[A-Za-z0-9-._~]*[A-Za-z0-9]$ ). For all other transfer types, this name must not start with transferJobs/OPI. By default, the provider will assign a random unique name in the transferJobs/{{name}} format, where name is a numeric value.
    notificationConfig TransferJobNotificationConfig
    Notification configuration. This is not supported for transfers involving PosixFilesystem. Structure documented below.
    project string
    The project in which the resource belongs. If it is not provided, the provider project is used.
    schedule TransferJobSchedule
    Schedule specification defining when the Transfer Job should be scheduled to start, end and what time to run. Structure documented below. Either schedule or event_stream must be set.
    status string
    Status of the job. Default: ENABLED. NOTE: The effect of the new job status takes place during a subsequent job run. For example, if you change the job status from ENABLED to DISABLED, and an operation spawned by the transfer is running, the status change would not affect the current operation.
    transferSpec TransferJobTransferSpec
    Transfer specification. Structure documented below.


    creation_time str
    When the Transfer Job was created.
    deletion_time str
    When the Transfer Job was deleted.
    description str
    Unique description to identify the Transfer Job.
    event_stream TransferJobEventStreamArgs
    Specifies the Event-driven transfer options. Event-driven transfers listen to an event stream to transfer updated files. Structure documented below. Either event_stream or schedule must be set.
    last_modification_time str
    When the Transfer Job was last modified.
    name str
    The name of the Transfer Job. This name must start with "transferJobs/" prefix and end with a letter or a number, and should be no more than 128 characters ( transferJobs/^(?!OPI)[A-Za-z0-9-._~]*[A-Za-z0-9]$ ). For transfers involving PosixFilesystem, this name must start with transferJobs/OPI specifically ( transferJobs/OPI^[A-Za-z0-9-._~]*[A-Za-z0-9]$ ). For all other transfer types, this name must not start with transferJobs/OPI. By default, the provider will assign a random unique name in the transferJobs/{{name}} format, where name is a numeric value.
    notification_config TransferJobNotificationConfigArgs
    Notification configuration. This is not supported for transfers involving PosixFilesystem. Structure documented below.
    project str
    The project in which the resource belongs. If it is not provided, the provider project is used.
    schedule TransferJobScheduleArgs
    Schedule specification defining when the Transfer Job should be scheduled to start, end and what time to run. Structure documented below. Either schedule or event_stream must be set.
    status str
    Status of the job. Default: ENABLED. NOTE: The effect of the new job status takes place during a subsequent job run. For example, if you change the job status from ENABLED to DISABLED, and an operation spawned by the transfer is running, the status change would not affect the current operation.
    transfer_spec TransferJobTransferSpecArgs
    Transfer specification. Structure documented below.


    creationTime String
    When the Transfer Job was created.
    deletionTime String
    When the Transfer Job was deleted.
    description String
    Unique description to identify the Transfer Job.
    eventStream Property Map
    Specifies the Event-driven transfer options. Event-driven transfers listen to an event stream to transfer updated files. Structure documented below. Either event_stream or schedule must be set.
    lastModificationTime String
    When the Transfer Job was last modified.
    name String
    The name of the Transfer Job. This name must start with "transferJobs/" prefix and end with a letter or a number, and should be no more than 128 characters ( transferJobs/^(?!OPI)[A-Za-z0-9-._~]*[A-Za-z0-9]$ ). For transfers involving PosixFilesystem, this name must start with transferJobs/OPI specifically ( transferJobs/OPI^[A-Za-z0-9-._~]*[A-Za-z0-9]$ ). For all other transfer types, this name must not start with transferJobs/OPI. By default, the provider will assign a random unique name in the transferJobs/{{name}} format, where name is a numeric value.
    notificationConfig Property Map
    Notification configuration. This is not supported for transfers involving PosixFilesystem. Structure documented below.
    project String
    The project in which the resource belongs. If it is not provided, the provider project is used.
    schedule Property Map
    Schedule specification defining when the Transfer Job should be scheduled to start, end and what time to run. Structure documented below. Either schedule or event_stream must be set.
    status String
    Status of the job. Default: ENABLED. NOTE: The effect of the new job status takes place during a subsequent job run. For example, if you change the job status from ENABLED to DISABLED, and an operation spawned by the transfer is running, the status change would not affect the current operation.
    transferSpec Property Map
    Transfer specification. Structure documented below.


    Supporting Types

    TransferJobEventStream, TransferJobEventStreamArgs

    Name string
    Specifies a unique name of the resource such as AWS SQS ARN in the form 'arn:aws:sqs:region:account_id:queue_name', or Pub/Sub subscription resource name in the form 'projects/{project}/subscriptions/{sub}'.
    EventStreamExpirationTime string
    Specifies the date and time at which Storage Transfer Service stops listening for events from this stream. After this time, any transfers in progress will complete, but no new transfers are initiated. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
    EventStreamStartTime string
    Specifies the date and time that Storage Transfer Service starts listening for events from this stream. If no start time is specified or start time is in the past, Storage Transfer Service starts listening immediately. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
    Name string
    Specifies a unique name of the resource such as AWS SQS ARN in the form 'arn:aws:sqs:region:account_id:queue_name', or Pub/Sub subscription resource name in the form 'projects/{project}/subscriptions/{sub}'.
    EventStreamExpirationTime string
    Specifies the date and time at which Storage Transfer Service stops listening for events from this stream. After this time, any transfers in progress will complete, but no new transfers are initiated. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
    EventStreamStartTime string
    Specifies the date and time that Storage Transfer Service starts listening for events from this stream. If no start time is specified or start time is in the past, Storage Transfer Service starts listening immediately. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
    name String
    Specifies a unique name of the resource such as AWS SQS ARN in the form 'arn:aws:sqs:region:account_id:queue_name', or Pub/Sub subscription resource name in the form 'projects/{project}/subscriptions/{sub}'.
    eventStreamExpirationTime String
    Specifies the date and time at which Storage Transfer Service stops listening for events from this stream. After this time, any transfers in progress will complete, but no new transfers are initiated. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
    eventStreamStartTime String
    Specifies the date and time that Storage Transfer Service starts listening for events from this stream. If no start time is specified or start time is in the past, Storage Transfer Service starts listening immediately. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
    name string
    Specifies a unique name of the resource such as AWS SQS ARN in the form 'arn:aws:sqs:region:account_id:queue_name', or Pub/Sub subscription resource name in the form 'projects/{project}/subscriptions/{sub}'.
    eventStreamExpirationTime string
    Specifies the date and time at which Storage Transfer Service stops listening for events from this stream. After this time, any transfers in progress will complete, but no new transfers are initiated. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
    eventStreamStartTime string
    Specifies the date and time that Storage Transfer Service starts listening for events from this stream. If no start time is specified or start time is in the past, Storage Transfer Service starts listening immediately. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
    name str
    Specifies a unique name of the resource such as AWS SQS ARN in the form 'arn:aws:sqs:region:account_id:queue_name', or Pub/Sub subscription resource name in the form 'projects/{project}/subscriptions/{sub}'.
    event_stream_expiration_time str
    Specifies the date and time at which Storage Transfer Service stops listening for events from this stream. After this time, any transfers in progress will complete, but no new transfers are initiated. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
    event_stream_start_time str
    Specifies the date and time that Storage Transfer Service starts listening for events from this stream. If no start time is specified or start time is in the past, Storage Transfer Service starts listening immediately. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
    name String
    Specifies a unique name of the resource such as AWS SQS ARN in the form 'arn:aws:sqs:region:account_id:queue_name', or Pub/Sub subscription resource name in the form 'projects/{project}/subscriptions/{sub}'.
    eventStreamExpirationTime String
    Specifies the date and time at which Storage Transfer Service stops listening for events from this stream. After this time, any transfers in progress will complete, but no new transfers are initiated. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
    eventStreamStartTime String
    Specifies the date and time that Storage Transfer Service starts listening for events from this stream. If no start time is specified or start time is in the past, Storage Transfer Service starts listening immediately. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".

    TransferJobNotificationConfig, TransferJobNotificationConfigArgs

    PayloadFormat string
    The desired format of the notification message payloads. One of "NONE" or "JSON".
    PubsubTopic string
    The Topic.name of the Pub/Sub topic to which to publish notifications. Must be of the format: projects/{project}/topics/{topic}. Not matching this format results in an INVALID_ARGUMENT error.
    EventTypes List<string>
    Event types for which a notification is desired. If empty, send notifications for all event types. The valid types are "TRANSFER_OPERATION_SUCCESS", "TRANSFER_OPERATION_FAILED", "TRANSFER_OPERATION_ABORTED".
    PayloadFormat string
    The desired format of the notification message payloads. One of "NONE" or "JSON".
    PubsubTopic string
    The Topic.name of the Pub/Sub topic to which to publish notifications. Must be of the format: projects/{project}/topics/{topic}. Not matching this format results in an INVALID_ARGUMENT error.
    EventTypes []string
    Event types for which a notification is desired. If empty, send notifications for all event types. The valid types are "TRANSFER_OPERATION_SUCCESS", "TRANSFER_OPERATION_FAILED", "TRANSFER_OPERATION_ABORTED".
    payloadFormat String
    The desired format of the notification message payloads. One of "NONE" or "JSON".
    pubsubTopic String
    The Topic.name of the Pub/Sub topic to which to publish notifications. Must be of the format: projects/{project}/topics/{topic}. Not matching this format results in an INVALID_ARGUMENT error.
    eventTypes List<String>
    Event types for which a notification is desired. If empty, send notifications for all event types. The valid types are "TRANSFER_OPERATION_SUCCESS", "TRANSFER_OPERATION_FAILED", "TRANSFER_OPERATION_ABORTED".
    payloadFormat string
    The desired format of the notification message payloads. One of "NONE" or "JSON".
    pubsubTopic string
    The Topic.name of the Pub/Sub topic to which to publish notifications. Must be of the format: projects/{project}/topics/{topic}. Not matching this format results in an INVALID_ARGUMENT error.
    eventTypes string[]
    Event types for which a notification is desired. If empty, send notifications for all event types. The valid types are "TRANSFER_OPERATION_SUCCESS", "TRANSFER_OPERATION_FAILED", "TRANSFER_OPERATION_ABORTED".
    payload_format str
    The desired format of the notification message payloads. One of "NONE" or "JSON".
    pubsub_topic str
    The Topic.name of the Pub/Sub topic to which to publish notifications. Must be of the format: projects/{project}/topics/{topic}. Not matching this format results in an INVALID_ARGUMENT error.
    event_types Sequence[str]
    Event types for which a notification is desired. If empty, send notifications for all event types. The valid types are "TRANSFER_OPERATION_SUCCESS", "TRANSFER_OPERATION_FAILED", "TRANSFER_OPERATION_ABORTED".
    payloadFormat String
    The desired format of the notification message payloads. One of "NONE" or "JSON".
    pubsubTopic String
    The Topic.name of the Pub/Sub topic to which to publish notifications. Must be of the format: projects/{project}/topics/{topic}. Not matching this format results in an INVALID_ARGUMENT error.
    eventTypes List<String>
    Event types for which a notification is desired. If empty, send notifications for all event types. The valid types are "TRANSFER_OPERATION_SUCCESS", "TRANSFER_OPERATION_FAILED", "TRANSFER_OPERATION_ABORTED".

    TransferJobSchedule, TransferJobScheduleArgs

    ScheduleStartDate TransferJobScheduleScheduleStartDate
    The first day the recurring transfer is scheduled to run. If schedule_start_date is in the past, the transfer will run for the first time on the following day. Structure documented below.
    RepeatInterval string
    Interval between the start of each scheduled transfer. If unspecified, the default value is 24 hours. This value may not be less than 1 hour. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".
    ScheduleEndDate TransferJobScheduleScheduleEndDate
    The last day the recurring transfer will be run. If schedule_end_date is the same as schedule_start_date, the transfer will be executed only once. Structure documented below.
    StartTimeOfDay TransferJobScheduleStartTimeOfDay
    The time in UTC at which the transfer will be scheduled to start in a day. Transfers may start later than this time. If not specified, recurring and one-time transfers that are scheduled to run today will run immediately; recurring transfers that are scheduled to run on a future date will start at approximately midnight UTC on that date. Note that when configuring a transfer with the Cloud Platform Console, the transfer's start time in a day is specified in your local timezone. Structure documented below.
    ScheduleStartDate TransferJobScheduleScheduleStartDate
    The first day the recurring transfer is scheduled to run. If schedule_start_date is in the past, the transfer will run for the first time on the following day. Structure documented below.
    RepeatInterval string
    Interval between the start of each scheduled transfer. If unspecified, the default value is 24 hours. This value may not be less than 1 hour. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".
    ScheduleEndDate TransferJobScheduleScheduleEndDate
    The last day the recurring transfer will be run. If schedule_end_date is the same as schedule_start_date, the transfer will be executed only once. Structure documented below.
    StartTimeOfDay TransferJobScheduleStartTimeOfDay
    The time in UTC at which the transfer will be scheduled to start in a day. Transfers may start later than this time. If not specified, recurring and one-time transfers that are scheduled to run today will run immediately; recurring transfers that are scheduled to run on a future date will start at approximately midnight UTC on that date. Note that when configuring a transfer with the Cloud Platform Console, the transfer's start time in a day is specified in your local timezone. Structure documented below.
    scheduleStartDate TransferJobScheduleScheduleStartDate
    The first day the recurring transfer is scheduled to run. If schedule_start_date is in the past, the transfer will run for the first time on the following day. Structure documented below.
    repeatInterval String
    Interval between the start of each scheduled transfer. If unspecified, the default value is 24 hours. This value may not be less than 1 hour. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".
    scheduleEndDate TransferJobScheduleScheduleEndDate
    The last day the recurring transfer will be run. If schedule_end_date is the same as schedule_start_date, the transfer will be executed only once. Structure documented below.
    startTimeOfDay TransferJobScheduleStartTimeOfDay
    The time in UTC at which the transfer will be scheduled to start in a day. Transfers may start later than this time. If not specified, recurring and one-time transfers that are scheduled to run today will run immediately; recurring transfers that are scheduled to run on a future date will start at approximately midnight UTC on that date. Note that when configuring a transfer with the Cloud Platform Console, the transfer's start time in a day is specified in your local timezone. Structure documented below.
    scheduleStartDate TransferJobScheduleScheduleStartDate
    The first day the recurring transfer is scheduled to run. If schedule_start_date is in the past, the transfer will run for the first time on the following day. Structure documented below.
    repeatInterval string
    Interval between the start of each scheduled transfer. If unspecified, the default value is 24 hours. This value may not be less than 1 hour. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".
    scheduleEndDate TransferJobScheduleScheduleEndDate
    The last day the recurring transfer will be run. If schedule_end_date is the same as schedule_start_date, the transfer will be executed only once. Structure documented below.
    startTimeOfDay TransferJobScheduleStartTimeOfDay
    The time in UTC at which the transfer will be scheduled to start in a day. Transfers may start later than this time. If not specified, recurring and one-time transfers that are scheduled to run today will run immediately; recurring transfers that are scheduled to run on a future date will start at approximately midnight UTC on that date. Note that when configuring a transfer with the Cloud Platform Console, the transfer's start time in a day is specified in your local timezone. Structure documented below.
    schedule_start_date TransferJobScheduleScheduleStartDate
    The first day the recurring transfer is scheduled to run. If schedule_start_date is in the past, the transfer will run for the first time on the following day. Structure documented below.
    repeat_interval str
    Interval between the start of each scheduled transfer. If unspecified, the default value is 24 hours. This value may not be less than 1 hour. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".
    schedule_end_date TransferJobScheduleScheduleEndDate
    The last day the recurring transfer will be run. If schedule_end_date is the same as schedule_start_date, the transfer will be executed only once. Structure documented below.
    start_time_of_day TransferJobScheduleStartTimeOfDay
    The time in UTC at which the transfer will be scheduled to start in a day. Transfers may start later than this time. If not specified, recurring and one-time transfers that are scheduled to run today will run immediately; recurring transfers that are scheduled to run on a future date will start at approximately midnight UTC on that date. Note that when configuring a transfer with the Cloud Platform Console, the transfer's start time in a day is specified in your local timezone. Structure documented below.
    scheduleStartDate Property Map
    The first day the recurring transfer is scheduled to run. If schedule_start_date is in the past, the transfer will run for the first time on the following day. Structure documented below.
    repeatInterval String
    Interval between the start of each scheduled transfer. If unspecified, the default value is 24 hours. This value may not be less than 1 hour. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".
    scheduleEndDate Property Map
    The last day the recurring transfer will be run. If schedule_end_date is the same as schedule_start_date, the transfer will be executed only once. Structure documented below.
    startTimeOfDay Property Map
    The time in UTC at which the transfer will be scheduled to start in a day. Transfers may start later than this time. If not specified, recurring and one-time transfers that are scheduled to run today will run immediately; recurring transfers that are scheduled to run on a future date will start at approximately midnight UTC on that date. Note that when configuring a transfer with the Cloud Platform Console, the transfer's start time in a day is specified in your local timezone. Structure documented below.

    TransferJobScheduleScheduleEndDate, TransferJobScheduleScheduleEndDateArgs

    Day int

    Day of month. Must be from 1 to 31 and valid for the year and month.

    The start_time_of_day blocks support:

    Month int
    Month of year. Must be from 1 to 12.
    Year int
    Year of date. Must be from 1 to 9999.
    Day int

    Day of month. Must be from 1 to 31 and valid for the year and month.

    The start_time_of_day blocks support:

    Month int
    Month of year. Must be from 1 to 12.
    Year int
    Year of date. Must be from 1 to 9999.
    day Integer

    Day of month. Must be from 1 to 31 and valid for the year and month.

    The start_time_of_day blocks support:

    month Integer
    Month of year. Must be from 1 to 12.
    year Integer
    Year of date. Must be from 1 to 9999.
    day number

    Day of month. Must be from 1 to 31 and valid for the year and month.

    The start_time_of_day blocks support:

    month number
    Month of year. Must be from 1 to 12.
    year number
    Year of date. Must be from 1 to 9999.
    day int

    Day of month. Must be from 1 to 31 and valid for the year and month.

    The start_time_of_day blocks support:

    month int
    Month of year. Must be from 1 to 12.
    year int
    Year of date. Must be from 1 to 9999.
    day Number

    Day of month. Must be from 1 to 31 and valid for the year and month.

    The start_time_of_day blocks support:

    month Number
    Month of year. Must be from 1 to 12.
    year Number
    Year of date. Must be from 1 to 9999.

    TransferJobScheduleScheduleStartDate, TransferJobScheduleScheduleStartDateArgs

    Day int

    Day of month. Must be from 1 to 31 and valid for the year and month.

    The start_time_of_day blocks support:

    Month int
    Month of year. Must be from 1 to 12.
    Year int
    Year of date. Must be from 1 to 9999.
    Day int

    Day of month. Must be from 1 to 31 and valid for the year and month.

    The start_time_of_day blocks support:

    Month int
    Month of year. Must be from 1 to 12.
    Year int
    Year of date. Must be from 1 to 9999.
    day Integer

    Day of month. Must be from 1 to 31 and valid for the year and month.

    The start_time_of_day blocks support:

    month Integer
    Month of year. Must be from 1 to 12.
    year Integer
    Year of date. Must be from 1 to 9999.
    day number

    Day of month. Must be from 1 to 31 and valid for the year and month.

    The start_time_of_day blocks support:

    month number
    Month of year. Must be from 1 to 12.
    year number
    Year of date. Must be from 1 to 9999.
    day int

    Day of month. Must be from 1 to 31 and valid for the year and month.

    The start_time_of_day blocks support:

    month int
    Month of year. Must be from 1 to 12.
    year int
    Year of date. Must be from 1 to 9999.
    day Number

    Day of month. Must be from 1 to 31 and valid for the year and month.

    The start_time_of_day blocks support:

    month Number
    Month of year. Must be from 1 to 12.
    year Number
    Year of date. Must be from 1 to 9999.

    TransferJobScheduleStartTimeOfDay, TransferJobScheduleStartTimeOfDayArgs

    Hours int
    Hours of day in 24 hour format. Should be from 0 to 23.
    Minutes int
    Minutes of hour of day. Must be from 0 to 59.
    Nanos int
    Fractions of seconds in nanoseconds. Must be from 0 to 999,999,999.
    Seconds int
    Seconds of minutes of the time. Must normally be from 0 to 59.
    Hours int
    Hours of day in 24 hour format. Should be from 0 to 23.
    Minutes int
    Minutes of hour of day. Must be from 0 to 59.
    Nanos int
    Fractions of seconds in nanoseconds. Must be from 0 to 999,999,999.
    Seconds int
    Seconds of minutes of the time. Must normally be from 0 to 59.
    hours Integer
    Hours of day in 24 hour format. Should be from 0 to 23.
    minutes Integer
    Minutes of hour of day. Must be from 0 to 59.
    nanos Integer
    Fractions of seconds in nanoseconds. Must be from 0 to 999,999,999.
    seconds Integer
    Seconds of minutes of the time. Must normally be from 0 to 59.
    hours number
    Hours of day in 24-hour format. Should be from 0 to 23.
    minutes number
    Minutes of hour of day. Must be from 0 to 59.
    nanos number
    Fractions of seconds in nanoseconds. Must be from 0 to 999,999,999.
    seconds number
    Seconds of minutes of the time. Must normally be from 0 to 59.
    hours int
    Hours of day in 24-hour format. Should be from 0 to 23.
    minutes int
    Minutes of hour of day. Must be from 0 to 59.
    nanos int
    Fractions of seconds in nanoseconds. Must be from 0 to 999,999,999.
    seconds int
    Seconds of minutes of the time. Must normally be from 0 to 59.
    hours Number
    Hours of day in 24-hour format. Should be from 0 to 23.
    minutes Number
    Minutes of hour of day. Must be from 0 to 59.
    nanos Number
    Fractions of seconds in nanoseconds. Must be from 0 to 999,999,999.
    seconds Number
    Seconds of minutes of the time. Must normally be from 0 to 59.

    TransferJobTransferSpec, TransferJobTransferSpecArgs

    AwsS3DataSource TransferJobTransferSpecAwsS3DataSource
    An AWS S3 data source. Structure documented below.
    AzureBlobStorageDataSource TransferJobTransferSpecAzureBlobStorageDataSource
    An Azure Blob Storage data source. Structure documented below.
    GcsDataSink TransferJobTransferSpecGcsDataSink
    A Google Cloud Storage data sink. Structure documented below.
    GcsDataSource TransferJobTransferSpecGcsDataSource
    A Google Cloud Storage data source. Structure documented below.
    HttpDataSource TransferJobTransferSpecHttpDataSource
    An HTTP URL data source. Structure documented below.
    ObjectConditions TransferJobTransferSpecObjectConditions
    Only objects that satisfy these object conditions are included in the set of data source and data sink objects. Object conditions based on objects' last_modification_time do not exclude objects in a data sink. Structure documented below.
    PosixDataSink TransferJobTransferSpecPosixDataSink
    A POSIX data sink. Structure documented below.
    PosixDataSource TransferJobTransferSpecPosixDataSource
    A POSIX filesystem data source. Structure documented below.
    SinkAgentPoolName string
    Specifies the agent pool name associated with the posix data sink. When unspecified, the default name is used.
    SourceAgentPoolName string
    Specifies the agent pool name associated with the posix data source. When unspecified, the default name is used.
    TransferOptions TransferJobTransferSpecTransferOptions
    Characteristics of how to treat files from datasource and sink during job. If the option delete_objects_unique_in_sink is true, object conditions based on objects' last_modification_time are ignored and do not exclude objects in a data source or a data sink. Structure documented below.
    AwsS3DataSource TransferJobTransferSpecAwsS3DataSource
    An AWS S3 data source. Structure documented below.
    AzureBlobStorageDataSource TransferJobTransferSpecAzureBlobStorageDataSource
    An Azure Blob Storage data source. Structure documented below.
    GcsDataSink TransferJobTransferSpecGcsDataSink
    A Google Cloud Storage data sink. Structure documented below.
    GcsDataSource TransferJobTransferSpecGcsDataSource
    A Google Cloud Storage data source. Structure documented below.
    HttpDataSource TransferJobTransferSpecHttpDataSource
    An HTTP URL data source. Structure documented below.
    ObjectConditions TransferJobTransferSpecObjectConditions
    Only objects that satisfy these object conditions are included in the set of data source and data sink objects. Object conditions based on objects' last_modification_time do not exclude objects in a data sink. Structure documented below.
    PosixDataSink TransferJobTransferSpecPosixDataSink
    A POSIX data sink. Structure documented below.
    PosixDataSource TransferJobTransferSpecPosixDataSource
    A POSIX filesystem data source. Structure documented below.
    SinkAgentPoolName string
    Specifies the agent pool name associated with the posix data sink. When unspecified, the default name is used.
    SourceAgentPoolName string
    Specifies the agent pool name associated with the posix data source. When unspecified, the default name is used.
    TransferOptions TransferJobTransferSpecTransferOptions
    Characteristics of how to treat files from datasource and sink during job. If the option delete_objects_unique_in_sink is true, object conditions based on objects' last_modification_time are ignored and do not exclude objects in a data source or a data sink. Structure documented below.
    awsS3DataSource TransferJobTransferSpecAwsS3DataSource
    An AWS S3 data source. Structure documented below.
    azureBlobStorageDataSource TransferJobTransferSpecAzureBlobStorageDataSource
    An Azure Blob Storage data source. Structure documented below.
    gcsDataSink TransferJobTransferSpecGcsDataSink
    A Google Cloud Storage data sink. Structure documented below.
    gcsDataSource TransferJobTransferSpecGcsDataSource
    A Google Cloud Storage data source. Structure documented below.
    httpDataSource TransferJobTransferSpecHttpDataSource
    An HTTP URL data source. Structure documented below.
    objectConditions TransferJobTransferSpecObjectConditions
    Only objects that satisfy these object conditions are included in the set of data source and data sink objects. Object conditions based on objects' last_modification_time do not exclude objects in a data sink. Structure documented below.
    posixDataSink TransferJobTransferSpecPosixDataSink
    A POSIX data sink. Structure documented below.
    posixDataSource TransferJobTransferSpecPosixDataSource
    A POSIX filesystem data source. Structure documented below.
    sinkAgentPoolName String
    Specifies the agent pool name associated with the posix data sink. When unspecified, the default name is used.
    sourceAgentPoolName String
    Specifies the agent pool name associated with the posix data source. When unspecified, the default name is used.
    transferOptions TransferJobTransferSpecTransferOptions
    Characteristics of how to treat files from datasource and sink during job. If the option delete_objects_unique_in_sink is true, object conditions based on objects' last_modification_time are ignored and do not exclude objects in a data source or a data sink. Structure documented below.
    awsS3DataSource TransferJobTransferSpecAwsS3DataSource
    An AWS S3 data source. Structure documented below.
    azureBlobStorageDataSource TransferJobTransferSpecAzureBlobStorageDataSource
    An Azure Blob Storage data source. Structure documented below.
    gcsDataSink TransferJobTransferSpecGcsDataSink
    A Google Cloud Storage data sink. Structure documented below.
    gcsDataSource TransferJobTransferSpecGcsDataSource
    A Google Cloud Storage data source. Structure documented below.
    httpDataSource TransferJobTransferSpecHttpDataSource
    An HTTP URL data source. Structure documented below.
    objectConditions TransferJobTransferSpecObjectConditions
    Only objects that satisfy these object conditions are included in the set of data source and data sink objects. Object conditions based on objects' last_modification_time do not exclude objects in a data sink. Structure documented below.
    posixDataSink TransferJobTransferSpecPosixDataSink
    A POSIX data sink. Structure documented below.
    posixDataSource TransferJobTransferSpecPosixDataSource
    A POSIX filesystem data source. Structure documented below.
    sinkAgentPoolName string
    Specifies the agent pool name associated with the posix data sink. When unspecified, the default name is used.
    sourceAgentPoolName string
    Specifies the agent pool name associated with the posix data source. When unspecified, the default name is used.
    transferOptions TransferJobTransferSpecTransferOptions
    Characteristics of how to treat files from datasource and sink during job. If the option delete_objects_unique_in_sink is true, object conditions based on objects' last_modification_time are ignored and do not exclude objects in a data source or a data sink. Structure documented below.
    aws_s3_data_source TransferJobTransferSpecAwsS3DataSource
    An AWS S3 data source. Structure documented below.
    azure_blob_storage_data_source TransferJobTransferSpecAzureBlobStorageDataSource
    An Azure Blob Storage data source. Structure documented below.
    gcs_data_sink TransferJobTransferSpecGcsDataSink
    A Google Cloud Storage data sink. Structure documented below.
    gcs_data_source TransferJobTransferSpecGcsDataSource
    A Google Cloud Storage data source. Structure documented below.
    http_data_source TransferJobTransferSpecHttpDataSource
    An HTTP URL data source. Structure documented below.
    object_conditions TransferJobTransferSpecObjectConditions
    Only objects that satisfy these object conditions are included in the set of data source and data sink objects. Object conditions based on objects' last_modification_time do not exclude objects in a data sink. Structure documented below.
    posix_data_sink TransferJobTransferSpecPosixDataSink
    A POSIX data sink. Structure documented below.
    posix_data_source TransferJobTransferSpecPosixDataSource
    A POSIX filesystem data source. Structure documented below.
    sink_agent_pool_name str
    Specifies the agent pool name associated with the posix data sink. When unspecified, the default name is used.
    source_agent_pool_name str
    Specifies the agent pool name associated with the posix data source. When unspecified, the default name is used.
    transfer_options TransferJobTransferSpecTransferOptions
    Characteristics of how to treat files from datasource and sink during job. If the option delete_objects_unique_in_sink is true, object conditions based on objects' last_modification_time are ignored and do not exclude objects in a data source or a data sink. Structure documented below.
    awsS3DataSource Property Map
    An AWS S3 data source. Structure documented below.
    azureBlobStorageDataSource Property Map
    An Azure Blob Storage data source. Structure documented below.
    gcsDataSink Property Map
    A Google Cloud Storage data sink. Structure documented below.
    gcsDataSource Property Map
    A Google Cloud Storage data source. Structure documented below.
    httpDataSource Property Map
    An HTTP URL data source. Structure documented below.
    objectConditions Property Map
    Only objects that satisfy these object conditions are included in the set of data source and data sink objects. Object conditions based on objects' last_modification_time do not exclude objects in a data sink. Structure documented below.
    posixDataSink Property Map
    A POSIX data sink. Structure documented below.
    posixDataSource Property Map
    A POSIX filesystem data source. Structure documented below.
    sinkAgentPoolName String
    Specifies the agent pool name associated with the posix data sink. When unspecified, the default name is used.
    sourceAgentPoolName String
    Specifies the agent pool name associated with the posix data source. When unspecified, the default name is used.
    transferOptions Property Map
    Characteristics of how to treat files from datasource and sink during job. If the option delete_objects_unique_in_sink is true, object conditions based on objects' last_modification_time are ignored and do not exclude objects in a data source or a data sink. Structure documented below.

    TransferJobTransferSpecAwsS3DataSource, TransferJobTransferSpecAwsS3DataSourceArgs

    BucketName string
    S3 Bucket name.
    AwsAccessKey TransferJobTransferSpecAwsS3DataSourceAwsAccessKey
    AWS credentials block.
    Path string
    Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'.
    RoleArn string
    The Amazon Resource Name (ARN) of the role to support temporary credentials via 'AssumeRoleWithWebIdentity'. For more information about ARNs, see IAM ARNs. When a role ARN is provided, Transfer Service fetches temporary credentials for the session using a 'AssumeRoleWithWebIdentity' call for the provided role using the [GoogleServiceAccount][] for this project.
    BucketName string
    S3 Bucket name.
    AwsAccessKey TransferJobTransferSpecAwsS3DataSourceAwsAccessKey
    AWS credentials block.
    Path string
    Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'.
    RoleArn string
    The Amazon Resource Name (ARN) of the role to support temporary credentials via 'AssumeRoleWithWebIdentity'. For more information about ARNs, see IAM ARNs. When a role ARN is provided, Transfer Service fetches temporary credentials for the session using a 'AssumeRoleWithWebIdentity' call for the provided role using the [GoogleServiceAccount][] for this project.
    bucketName String
    S3 Bucket name.
    awsAccessKey TransferJobTransferSpecAwsS3DataSourceAwsAccessKey
    AWS credentials block.
    path String
    Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'.
    roleArn String
    The Amazon Resource Name (ARN) of the role to support temporary credentials via 'AssumeRoleWithWebIdentity'. For more information about ARNs, see IAM ARNs. When a role ARN is provided, Transfer Service fetches temporary credentials for the session using a 'AssumeRoleWithWebIdentity' call for the provided role using the [GoogleServiceAccount][] for this project.
    bucketName string
    S3 Bucket name.
    awsAccessKey TransferJobTransferSpecAwsS3DataSourceAwsAccessKey
    AWS credentials block.
    path string
    Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'.
    roleArn string
    The Amazon Resource Name (ARN) of the role to support temporary credentials via 'AssumeRoleWithWebIdentity'. For more information about ARNs, see IAM ARNs. When a role ARN is provided, Transfer Service fetches temporary credentials for the session using a 'AssumeRoleWithWebIdentity' call for the provided role using the [GoogleServiceAccount][] for this project.
    bucket_name str
    S3 Bucket name.
    aws_access_key TransferJobTransferSpecAwsS3DataSourceAwsAccessKey
    AWS credentials block.
    path str
    Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'.
    role_arn str
    The Amazon Resource Name (ARN) of the role to support temporary credentials via 'AssumeRoleWithWebIdentity'. For more information about ARNs, see IAM ARNs. When a role ARN is provided, Transfer Service fetches temporary credentials for the session using a 'AssumeRoleWithWebIdentity' call for the provided role using the [GoogleServiceAccount][] for this project.
    bucketName String
    S3 Bucket name.
    awsAccessKey Property Map
    AWS credentials block.
    path String
    Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'.
    roleArn String
    The Amazon Resource Name (ARN) of the role to support temporary credentials via 'AssumeRoleWithWebIdentity'. For more information about ARNs, see IAM ARNs. When a role ARN is provided, Transfer Service fetches temporary credentials for the session using a 'AssumeRoleWithWebIdentity' call for the provided role using the [GoogleServiceAccount][] for this project.

    TransferJobTransferSpecAwsS3DataSourceAwsAccessKey, TransferJobTransferSpecAwsS3DataSourceAwsAccessKeyArgs

    AccessKeyId string
    AWS Key ID.
    SecretAccessKey string
    AWS Secret Access Key.
    AccessKeyId string
    AWS Key ID.
    SecretAccessKey string
    AWS Secret Access Key.
    accessKeyId String
    AWS Key ID.
    secretAccessKey String
    AWS Secret Access Key.
    accessKeyId string
    AWS Key ID.
    secretAccessKey string
    AWS Secret Access Key.
    access_key_id str
    AWS Key ID.
    secret_access_key str
    AWS Secret Access Key.
    accessKeyId String
    AWS Key ID.
    secretAccessKey String
    AWS Secret Access Key.

    TransferJobTransferSpecAzureBlobStorageDataSource, TransferJobTransferSpecAzureBlobStorageDataSourceArgs

    Container string
    The container to transfer from the Azure Storage account.
    StorageAccount string
    The name of the Azure Storage account.
    AzureCredentials TransferJobTransferSpecAzureBlobStorageDataSourceAzureCredentials
    Credentials used to authenticate API requests to Azure block.
    CredentialsSecret string
    Full Resource name of a secret in Secret Manager containing SAS Credentials in JSON form. Service Agent for Storage Transfer must have permissions to access secret. If credentials_secret is specified, do not specify azure_credentials.
    Path string
    Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'.
    Container string
    The container to transfer from the Azure Storage account.
    StorageAccount string
    The name of the Azure Storage account.
    AzureCredentials TransferJobTransferSpecAzureBlobStorageDataSourceAzureCredentials
    Credentials used to authenticate API requests to Azure block.
    CredentialsSecret string
    Full Resource name of a secret in Secret Manager containing SAS Credentials in JSON form. Service Agent for Storage Transfer must have permissions to access secret. If credentials_secret is specified, do not specify azure_credentials.
    Path string
    Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'.
    container String
    The container to transfer from the Azure Storage account.
    storageAccount String
    The name of the Azure Storage account.
    azureCredentials TransferJobTransferSpecAzureBlobStorageDataSourceAzureCredentials
    Credentials used to authenticate API requests to Azure block.
    credentialsSecret String
    Full Resource name of a secret in Secret Manager containing SAS Credentials in JSON form. Service Agent for Storage Transfer must have permissions to access secret. If credentials_secret is specified, do not specify azure_credentials.
    path String
    Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'.
    container string
    The container to transfer from the Azure Storage account.
    storageAccount string
    The name of the Azure Storage account.
    azureCredentials TransferJobTransferSpecAzureBlobStorageDataSourceAzureCredentials
    Credentials used to authenticate API requests to Azure block.
    credentialsSecret string
    Full Resource name of a secret in Secret Manager containing SAS Credentials in JSON form. Service Agent for Storage Transfer must have permissions to access secret. If credentials_secret is specified, do not specify azure_credentials.
    path string
    Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'.
    container str
    The container to transfer from the Azure Storage account.
    storage_account str
    The name of the Azure Storage account.
    azure_credentials TransferJobTransferSpecAzureBlobStorageDataSourceAzureCredentials
    Credentials used to authenticate API requests to Azure block.
    credentials_secret str
    Full Resource name of a secret in Secret Manager containing SAS Credentials in JSON form. Service Agent for Storage Transfer must have permissions to access secret. If credentials_secret is specified, do not specify azure_credentials.
    path str
    Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'.
    container String
    The container to transfer from the Azure Storage account.
    storageAccount String
    The name of the Azure Storage account.
    azureCredentials Property Map
    Credentials used to authenticate API requests to Azure block.
    credentialsSecret String
    Full Resource name of a secret in Secret Manager containing SAS Credentials in JSON form. Service Agent for Storage Transfer must have permissions to access secret. If credentials_secret is specified, do not specify azure_credentials.
    path String
    Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'.

    TransferJobTransferSpecAzureBlobStorageDataSourceAzureCredentials, TransferJobTransferSpecAzureBlobStorageDataSourceAzureCredentialsArgs

    SasToken string

    Azure shared access signature. See Grant limited access to Azure Storage resources using shared access signatures (SAS).

    The schedule_start_date and schedule_end_date blocks support:

    SasToken string

    Azure shared access signature. See Grant limited access to Azure Storage resources using shared access signatures (SAS).

    The schedule_start_date and schedule_end_date blocks support:

    sasToken String

    Azure shared access signature. See Grant limited access to Azure Storage resources using shared access signatures (SAS).

    The schedule_start_date and schedule_end_date blocks support:

    sasToken string

    Azure shared access signature. See Grant limited access to Azure Storage resources using shared access signatures (SAS).

    The schedule_start_date and schedule_end_date blocks support:

    sas_token str

    Azure shared access signature. See Grant limited access to Azure Storage resources using shared access signatures (SAS).

    The schedule_start_date and schedule_end_date blocks support:

    sasToken String

    Azure shared access signature. See Grant limited access to Azure Storage resources using shared access signatures (SAS).

    The schedule_start_date and schedule_end_date blocks support:

    TransferJobTransferSpecGcsDataSink, TransferJobTransferSpecGcsDataSinkArgs

    BucketName string
    Google Cloud Storage bucket name.
    Path string
    Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'.
    BucketName string
    Google Cloud Storage bucket name.
    Path string
    Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'.
    bucketName String
    Google Cloud Storage bucket name.
    path String
    Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'.
    bucketName string
    Google Cloud Storage bucket name.
    path string
    Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'.
    bucket_name str
    Google Cloud Storage bucket name.
    path str
    Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'.
    bucketName String
    Google Cloud Storage bucket name.
    path String
    Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'.

    TransferJobTransferSpecGcsDataSource, TransferJobTransferSpecGcsDataSourceArgs

    BucketName string
    Google Cloud Storage bucket name.
    Path string
    Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'.
    BucketName string
    Google Cloud Storage bucket name.
    Path string
    Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'.
    bucketName String
    Google Cloud Storage bucket name.
    path String
    Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'.
    bucketName string
    Google Cloud Storage bucket name.
    path string
    Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'.
    bucket_name str
    Google Cloud Storage bucket name.
    path str
    Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'.
    bucketName String
    Google Cloud Storage bucket name.
    path String
    Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'.

    TransferJobTransferSpecHttpDataSource, TransferJobTransferSpecHttpDataSourceArgs

    ListUrl string
    The URL that points to the file that stores the object list entries. This file must allow public access. Currently, only URLs with HTTP and HTTPS schemes are supported.
    ListUrl string
    The URL that points to the file that stores the object list entries. This file must allow public access. Currently, only URLs with HTTP and HTTPS schemes are supported.
    listUrl String
    The URL that points to the file that stores the object list entries. This file must allow public access. Currently, only URLs with HTTP and HTTPS schemes are supported.
    listUrl string
    The URL that points to the file that stores the object list entries. This file must allow public access. Currently, only URLs with HTTP and HTTPS schemes are supported.
    list_url str
    The URL that points to the file that stores the object list entries. This file must allow public access. Currently, only URLs with HTTP and HTTPS schemes are supported.
    listUrl String
    The URL that points to the file that stores the object list entries. This file must allow public access. Currently, only URLs with HTTP and HTTPS schemes are supported.

    TransferJobTransferSpecObjectConditions, TransferJobTransferSpecObjectConditionsArgs

    ExcludePrefixes List<string>
    exclude_prefixes must follow the requirements described for include_prefixes. See Requirements.
    IncludePrefixes List<string>
    If include_prefixes is specified, objects that satisfy the object conditions must have names that start with one of the include_prefixes and that do not start with any of the exclude_prefixes. If include_prefixes is not specified, all objects except those that have names starting with one of the exclude_prefixes must satisfy the object conditions. See Requirements.
    LastModifiedBefore string
    If specified, only objects with a "last modification time" before this timestamp and objects that don't have a "last modification time" are transferred. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
    LastModifiedSince string
    If specified, only objects with a "last modification time" on or after this timestamp and objects that don't have a "last modification time" are transferred. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
    MaxTimeElapsedSinceLastModification string
    A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".
    MinTimeElapsedSinceLastModification string
    A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".
    ExcludePrefixes []string
    exclude_prefixes must follow the requirements described for include_prefixes. See Requirements.
    IncludePrefixes []string
    If include_prefixes is specified, objects that satisfy the object conditions must have names that start with one of the include_prefixes and that do not start with any of the exclude_prefixes. If include_prefixes is not specified, all objects except those that have names starting with one of the exclude_prefixes must satisfy the object conditions. See Requirements.
    LastModifiedBefore string
    If specified, only objects with a "last modification time" before this timestamp and objects that don't have a "last modification time" are transferred. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
    LastModifiedSince string
    If specified, only objects with a "last modification time" on or after this timestamp and objects that don't have a "last modification time" are transferred. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
    MaxTimeElapsedSinceLastModification string
    A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".
    MinTimeElapsedSinceLastModification string
    A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".
    excludePrefixes List<String>
    exclude_prefixes must follow the requirements described for include_prefixes. See Requirements.
    includePrefixes List<String>
    If include_prefixes is specified, objects that satisfy the object conditions must have names that start with one of the include_prefixes and that do not start with any of the exclude_prefixes. If include_prefixes is not specified, all objects except those that have names starting with one of the exclude_prefixes must satisfy the object conditions. See Requirements.
    lastModifiedBefore String
    If specified, only objects with a "last modification time" before this timestamp and objects that don't have a "last modification time" are transferred. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
    lastModifiedSince String
    If specified, only objects with a "last modification time" on or after this timestamp and objects that don't have a "last modification time" are transferred. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
    maxTimeElapsedSinceLastModification String
    A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".
    minTimeElapsedSinceLastModification String
    A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".
    excludePrefixes string[]
    exclude_prefixes must follow the requirements described for include_prefixes. See Requirements.
    includePrefixes string[]
    If include_prefixes is specified, objects that satisfy the object conditions must have names that start with one of the include_prefixes and that do not start with any of the exclude_prefixes. If include_prefixes is not specified, all objects except those that have names starting with one of the exclude_prefixes must satisfy the object conditions. See Requirements.
    lastModifiedBefore string
    If specified, only objects with a "last modification time" before this timestamp and objects that don't have a "last modification time" are transferred. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
    lastModifiedSince string
    If specified, only objects with a "last modification time" on or after this timestamp and objects that don't have a "last modification time" are transferred. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
    maxTimeElapsedSinceLastModification string
    A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".
    minTimeElapsedSinceLastModification string
    A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".
    exclude_prefixes Sequence[str]
    exclude_prefixes must follow the requirements described for include_prefixes. See Requirements.
    include_prefixes Sequence[str]
    If include_prefixes is specified, objects that satisfy the object conditions must have names that start with one of the include_prefixes and that do not start with any of the exclude_prefixes. If include_prefixes is not specified, all objects except those that have names starting with one of the exclude_prefixes must satisfy the object conditions. See Requirements.
    last_modified_before str
    If specified, only objects with a "last modification time" before this timestamp and objects that don't have a "last modification time" are transferred. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
    last_modified_since str
    If specified, only objects with a "last modification time" on or after this timestamp and objects that don't have a "last modification time" are transferred. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
    max_time_elapsed_since_last_modification str
    A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".
    min_time_elapsed_since_last_modification str
    A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".
    excludePrefixes List<String>
    exclude_prefixes must follow the requirements described for include_prefixes. See Requirements.
    includePrefixes List<String>
    If include_prefixes is specified, objects that satisfy the object conditions must have names that start with one of the include_prefixes and that do not start with any of the exclude_prefixes. If include_prefixes is not specified, all objects except those that have names starting with one of the exclude_prefixes must satisfy the object conditions. See Requirements.
    lastModifiedBefore String
    If specified, only objects with a "last modification time" before this timestamp and objects that don't have a "last modification time" are transferred. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
    lastModifiedSince String
    If specified, only objects with a "last modification time" on or after this timestamp and objects that don't have a "last modification time" are transferred. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
    maxTimeElapsedSinceLastModification String
    A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".
    minTimeElapsedSinceLastModification String
    A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".

    TransferJobTransferSpecPosixDataSink, TransferJobTransferSpecPosixDataSinkArgs

    RootDirectory string
    Root directory path to the filesystem.
    RootDirectory string
    Root directory path to the filesystem.
    rootDirectory String
    Root directory path to the filesystem.
    rootDirectory string
    Root directory path to the filesystem.
    root_directory str
    Root directory path to the filesystem.
    rootDirectory String
    Root directory path to the filesystem.

    TransferJobTransferSpecPosixDataSource, TransferJobTransferSpecPosixDataSourceArgs

    RootDirectory string

    Root directory path to the filesystem.

    The posix_data_source block supports:

    RootDirectory string

    Root directory path to the filesystem.

    The posix_data_source block supports:

    rootDirectory String

    Root directory path to the filesystem.

    The posix_data_source block supports:

    rootDirectory string

    Root directory path to the filesystem.

    The posix_data_source block supports:

    root_directory str

    Root directory path to the filesystem.

    The posix_data_source block supports:

    rootDirectory String

    Root directory path to the filesystem.

    The posix_data_source block supports:

    TransferJobTransferSpecTransferOptions, TransferJobTransferSpecTransferOptionsArgs

    DeleteObjectsFromSourceAfterTransfer bool
    Whether objects should be deleted from the source after they are transferred to the sink. Note that this option and delete_objects_unique_in_sink are mutually exclusive.
    DeleteObjectsUniqueInSink bool
    Whether objects that exist only in the sink should be deleted. Note that this option and delete_objects_from_source_after_transfer are mutually exclusive.
    OverwriteObjectsAlreadyExistingInSink bool
    Whether overwriting objects that already exist in the sink is allowed.
    OverwriteWhen string
    When to overwrite objects that already exist in the sink. If not set, overwrite behavior is determined by overwrite_objects_already_existing_in_sink. Possible values: ALWAYS, DIFFERENT, NEVER.
    DeleteObjectsFromSourceAfterTransfer bool
    Whether objects should be deleted from the source after they are transferred to the sink. Note that this option and delete_objects_unique_in_sink are mutually exclusive.
    DeleteObjectsUniqueInSink bool
    Whether objects that exist only in the sink should be deleted. Note that this option and delete_objects_from_source_after_transfer are mutually exclusive.
    OverwriteObjectsAlreadyExistingInSink bool
    Whether overwriting objects that already exist in the sink is allowed.
    OverwriteWhen string
    When to overwrite objects that already exist in the sink. If not set, overwrite behavior is determined by overwrite_objects_already_existing_in_sink. Possible values: ALWAYS, DIFFERENT, NEVER.
    deleteObjectsFromSourceAfterTransfer Boolean
    Whether objects should be deleted from the source after they are transferred to the sink. Note that this option and delete_objects_unique_in_sink are mutually exclusive.
    deleteObjectsUniqueInSink Boolean
    Whether objects that exist only in the sink should be deleted. Note that this option and delete_objects_from_source_after_transfer are mutually exclusive.
    overwriteObjectsAlreadyExistingInSink Boolean
    Whether overwriting objects that already exist in the sink is allowed.
    overwriteWhen String
    When to overwrite objects that already exist in the sink. If not set, overwrite behavior is determined by overwrite_objects_already_existing_in_sink. Possible values: ALWAYS, DIFFERENT, NEVER.
    deleteObjectsFromSourceAfterTransfer boolean
    Whether objects should be deleted from the source after they are transferred to the sink. Note that this option and delete_objects_unique_in_sink are mutually exclusive.
    deleteObjectsUniqueInSink boolean
    Whether objects that exist only in the sink should be deleted. Note that this option and delete_objects_from_source_after_transfer are mutually exclusive.
    overwriteObjectsAlreadyExistingInSink boolean
    Whether overwriting objects that already exist in the sink is allowed.
    overwriteWhen string
    When to overwrite objects that already exist in the sink. If not set, overwrite behavior is determined by overwrite_objects_already_existing_in_sink. Possible values: ALWAYS, DIFFERENT, NEVER.
    delete_objects_from_source_after_transfer bool
    Whether objects should be deleted from the source after they are transferred to the sink. Note that this option and delete_objects_unique_in_sink are mutually exclusive.
    delete_objects_unique_in_sink bool
    Whether objects that exist only in the sink should be deleted. Note that this option and delete_objects_from_source_after_transfer are mutually exclusive.
    overwrite_objects_already_existing_in_sink bool
    Whether overwriting objects that already exist in the sink is allowed.
    overwrite_when str
    When to overwrite objects that already exist in the sink. If not set, overwrite behavior is determined by overwrite_objects_already_existing_in_sink. Possible values: ALWAYS, DIFFERENT, NEVER.
    deleteObjectsFromSourceAfterTransfer Boolean
    Whether objects should be deleted from the source after they are transferred to the sink. Note that this option and delete_objects_unique_in_sink are mutually exclusive.
    deleteObjectsUniqueInSink Boolean
    Whether objects that exist only in the sink should be deleted. Note that this option and delete_objects_from_source_after_transfer are mutually exclusive.
    overwriteObjectsAlreadyExistingInSink Boolean
    Whether overwriting objects that already exist in the sink is allowed.
    overwriteWhen String
    When to overwrite objects that already exist in the sink. If not set, overwrite behavior is determined by overwrite_objects_already_existing_in_sink. Possible values: ALWAYS, DIFFERENT, NEVER.

    Import

    Storage Transfer Jobs can be imported using the Transfer Job’s project and name (without the transferJobs/ prefix), e.g.

    • {{project_id}}/{{name}}, where name is a numeric value.

    When using the pulumi import command, Storage Transfer Jobs can be imported using one of the formats above. For example:

    $ pulumi import gcp:storage/transferJob:TransferJob default {{project_id}}/123456789
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    Google Cloud (GCP) Classic pulumi/pulumi-gcp
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the google-beta Terraform Provider.
    gcp logo
    Google Cloud Classic v7.19.0 published on Thursday, Apr 18, 2024 by Pulumi