gcp.storage.TransferJob
Creates a new Transfer Job in Google Cloud Storage Transfer.
To get more information about Google Cloud Storage Transfer, see the Storage Transfer Service overview (https://cloud.google.com/storage-transfer/docs/overview) and the REST API reference (https://cloud.google.com/storage-transfer/docs/reference/rest).
Example Usage
Example creating a nightly Transfer Job from an AWS S3 Bucket to a GCS bucket.
C#
using System.Collections.Generic;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var @default = Gcp.Storage.GetTransferProjectServieAccount.Invoke(new()
{
Project = @var.Project,
});
var s3_backup_bucketBucket = new Gcp.Storage.Bucket("s3-backup-bucketBucket", new()
{
StorageClass = "NEARLINE",
Project = @var.Project,
Location = "US",
});
var s3_backup_bucketBucketIAMMember = new Gcp.Storage.BucketIAMMember("s3-backup-bucketBucketIAMMember", new()
{
Bucket = s3_backup_bucketBucket.Name,
Role = "roles/storage.admin",
Member = @default.Apply(result => $"serviceAccount:{result.Email}"),
}, new CustomResourceOptions
{
DependsOn = new[]
{
s3_backup_bucketBucket,
},
});
var topic = new Gcp.PubSub.Topic("topic");
var notificationConfig = new Gcp.PubSub.TopicIAMMember("notificationConfig", new()
{
Topic = topic.Id,
Role = "roles/pubsub.publisher",
Member = @default.Apply(result => $"serviceAccount:{result.Email}"),
});
var s3_bucket_nightly_backup = new Gcp.Storage.TransferJob("s3-bucket-nightly-backup", new()
{
Description = "Nightly backup of S3 bucket",
Project = @var.Project,
TransferSpec = new Gcp.Storage.Inputs.TransferJobTransferSpecArgs
{
ObjectConditions = new Gcp.Storage.Inputs.TransferJobTransferSpecObjectConditionsArgs
{
MaxTimeElapsedSinceLastModification = "600s",
ExcludePrefixes = new[]
{
"requests.gz",
},
},
TransferOptions = new Gcp.Storage.Inputs.TransferJobTransferSpecTransferOptionsArgs
{
DeleteObjectsUniqueInSink = false,
},
AwsS3DataSource = new Gcp.Storage.Inputs.TransferJobTransferSpecAwsS3DataSourceArgs
{
BucketName = @var.Aws_s3_bucket,
AwsAccessKey = new Gcp.Storage.Inputs.TransferJobTransferSpecAwsS3DataSourceAwsAccessKeyArgs
{
AccessKeyId = @var.Aws_access_key,
SecretAccessKey = @var.Aws_secret_key,
},
},
GcsDataSink = new Gcp.Storage.Inputs.TransferJobTransferSpecGcsDataSinkArgs
{
BucketName = s3_backup_bucketBucket.Name,
Path = "foo/bar/",
},
},
Schedule = new Gcp.Storage.Inputs.TransferJobScheduleArgs
{
ScheduleStartDate = new Gcp.Storage.Inputs.TransferJobScheduleScheduleStartDateArgs
{
Year = 2018,
Month = 10,
Day = 1,
},
ScheduleEndDate = new Gcp.Storage.Inputs.TransferJobScheduleScheduleEndDateArgs
{
Year = 2019,
Month = 1,
Day = 15,
},
StartTimeOfDay = new Gcp.Storage.Inputs.TransferJobScheduleStartTimeOfDayArgs
{
Hours = 23,
Minutes = 30,
Seconds = 0,
Nanos = 0,
},
RepeatInterval = "604800s",
},
NotificationConfig = new Gcp.Storage.Inputs.TransferJobNotificationConfigArgs
{
PubsubTopic = topic.Id,
EventTypes = new[]
{
"TRANSFER_OPERATION_SUCCESS",
"TRANSFER_OPERATION_FAILED",
},
PayloadFormat = "JSON",
},
}, new CustomResourceOptions
{
DependsOn = new[]
{
s3_backup_bucketBucketIAMMember,
notificationConfig,
},
});
});
Go
package main
import (
"fmt"
"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/pubsub"
"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/storage"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_default, err := storage.GetTransferProjectServieAccount(ctx, &storage.GetTransferProjectServieAccountArgs{
Project: pulumi.StringRef(_var.Project),
}, nil)
if err != nil {
return err
}
s3_backup_bucketBucket, err := storage.NewBucket(ctx, "s3-backup-bucketBucket", &storage.BucketArgs{
StorageClass: pulumi.String("NEARLINE"),
Project: pulumi.Any(_var.Project),
Location: pulumi.String("US"),
})
if err != nil {
return err
}
s3_backup_bucketBucketIAMMember, err := storage.NewBucketIAMMember(ctx, "s3-backup-bucketBucketIAMMember", &storage.BucketIAMMemberArgs{
Bucket: s3_backup_bucketBucket.Name,
Role: pulumi.String("roles/storage.admin"),
Member: pulumi.String(fmt.Sprintf("serviceAccount:%v", _default.Email)),
}, pulumi.DependsOn([]pulumi.Resource{
s3_backup_bucketBucket,
}))
if err != nil {
return err
}
topic, err := pubsub.NewTopic(ctx, "topic", nil)
if err != nil {
return err
}
notificationConfig, err := pubsub.NewTopicIAMMember(ctx, "notificationConfig", &pubsub.TopicIAMMemberArgs{
Topic: topic.ID(),
Role: pulumi.String("roles/pubsub.publisher"),
Member: pulumi.String(fmt.Sprintf("serviceAccount:%v", _default.Email)),
})
if err != nil {
return err
}
_, err = storage.NewTransferJob(ctx, "s3-bucket-nightly-backup", &storage.TransferJobArgs{
Description: pulumi.String("Nightly backup of S3 bucket"),
Project: pulumi.Any(_var.Project),
TransferSpec: &storage.TransferJobTransferSpecArgs{
ObjectConditions: &storage.TransferJobTransferSpecObjectConditionsArgs{
MaxTimeElapsedSinceLastModification: pulumi.String("600s"),
ExcludePrefixes: pulumi.StringArray{
pulumi.String("requests.gz"),
},
},
TransferOptions: &storage.TransferJobTransferSpecTransferOptionsArgs{
DeleteObjectsUniqueInSink: pulumi.Bool(false),
},
AwsS3DataSource: &storage.TransferJobTransferSpecAwsS3DataSourceArgs{
BucketName: pulumi.Any(_var.Aws_s3_bucket),
AwsAccessKey: &storage.TransferJobTransferSpecAwsS3DataSourceAwsAccessKeyArgs{
AccessKeyId: pulumi.Any(_var.Aws_access_key),
SecretAccessKey: pulumi.Any(_var.Aws_secret_key),
},
},
GcsDataSink: &storage.TransferJobTransferSpecGcsDataSinkArgs{
BucketName: s3_backup_bucketBucket.Name,
Path: pulumi.String("foo/bar/"),
},
},
Schedule: &storage.TransferJobScheduleArgs{
ScheduleStartDate: &storage.TransferJobScheduleScheduleStartDateArgs{
Year: pulumi.Int(2018),
Month: pulumi.Int(10),
Day: pulumi.Int(1),
},
ScheduleEndDate: &storage.TransferJobScheduleScheduleEndDateArgs{
Year: pulumi.Int(2019),
Month: pulumi.Int(1),
Day: pulumi.Int(15),
},
StartTimeOfDay: &storage.TransferJobScheduleStartTimeOfDayArgs{
Hours: pulumi.Int(23),
Minutes: pulumi.Int(30),
Seconds: pulumi.Int(0),
Nanos: pulumi.Int(0),
},
RepeatInterval: pulumi.String("604800s"),
},
NotificationConfig: &storage.TransferJobNotificationConfigArgs{
PubsubTopic: topic.ID(),
EventTypes: pulumi.StringArray{
pulumi.String("TRANSFER_OPERATION_SUCCESS"),
pulumi.String("TRANSFER_OPERATION_FAILED"),
},
PayloadFormat: pulumi.String("JSON"),
},
}, pulumi.DependsOn([]pulumi.Resource{
s3_backup_bucketBucketIAMMember,
notificationConfig,
}))
if err != nil {
return err
}
return nil
})
}
Java
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.storage.StorageFunctions;
import com.pulumi.gcp.storage.inputs.GetTransferProjectServieAccountArgs;
import com.pulumi.gcp.storage.Bucket;
import com.pulumi.gcp.storage.BucketArgs;
import com.pulumi.gcp.storage.BucketIAMMember;
import com.pulumi.gcp.storage.BucketIAMMemberArgs;
import com.pulumi.gcp.pubsub.Topic;
import com.pulumi.gcp.pubsub.TopicIAMMember;
import com.pulumi.gcp.pubsub.TopicIAMMemberArgs;
import com.pulumi.gcp.storage.TransferJob;
import com.pulumi.gcp.storage.TransferJobArgs;
import com.pulumi.gcp.storage.inputs.TransferJobTransferSpecArgs;
import com.pulumi.gcp.storage.inputs.TransferJobTransferSpecObjectConditionsArgs;
import com.pulumi.gcp.storage.inputs.TransferJobTransferSpecTransferOptionsArgs;
import com.pulumi.gcp.storage.inputs.TransferJobTransferSpecAwsS3DataSourceArgs;
import com.pulumi.gcp.storage.inputs.TransferJobTransferSpecAwsS3DataSourceAwsAccessKeyArgs;
import com.pulumi.gcp.storage.inputs.TransferJobTransferSpecGcsDataSinkArgs;
import com.pulumi.gcp.storage.inputs.TransferJobScheduleArgs;
import com.pulumi.gcp.storage.inputs.TransferJobScheduleScheduleStartDateArgs;
import com.pulumi.gcp.storage.inputs.TransferJobScheduleScheduleEndDateArgs;
import com.pulumi.gcp.storage.inputs.TransferJobScheduleStartTimeOfDayArgs;
import com.pulumi.gcp.storage.inputs.TransferJobNotificationConfigArgs;
import com.pulumi.resources.CustomResourceOptions;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var default_ = StorageFunctions.getTransferProjectServieAccount(GetTransferProjectServieAccountArgs.builder()
.project(var_.project())
.build());
var s3_backup_bucketBucket = new Bucket("s3-backup-bucketBucket", BucketArgs.builder()
.storageClass("NEARLINE")
.project(var_.project())
.location("US")
.build());
var s3_backup_bucketBucketIAMMember = new BucketIAMMember("s3-backup-bucketBucketIAMMember", BucketIAMMemberArgs.builder()
.bucket(s3_backup_bucketBucket.name())
.role("roles/storage.admin")
.member(default_.applyValue(account -> String.format("serviceAccount:%s", account.email())))
.build(), CustomResourceOptions.builder()
.dependsOn(s3_backup_bucketBucket)
.build());
var topic = new Topic("topic");
var notificationConfig = new TopicIAMMember("notificationConfig", TopicIAMMemberArgs.builder()
.topic(topic.id())
.role("roles/pubsub.publisher")
.member(default_.applyValue(account -> String.format("serviceAccount:%s", account.email())))
.build());
var s3_bucket_nightly_backup = new TransferJob("s3-bucket-nightly-backup", TransferJobArgs.builder()
.description("Nightly backup of S3 bucket")
.project(var_.project())
.transferSpec(TransferJobTransferSpecArgs.builder()
.objectConditions(TransferJobTransferSpecObjectConditionsArgs.builder()
.maxTimeElapsedSinceLastModification("600s")
.excludePrefixes("requests.gz")
.build())
.transferOptions(TransferJobTransferSpecTransferOptionsArgs.builder()
.deleteObjectsUniqueInSink(false)
.build())
.awsS3DataSource(TransferJobTransferSpecAwsS3DataSourceArgs.builder()
.bucketName(var_.aws_s3_bucket())
.awsAccessKey(TransferJobTransferSpecAwsS3DataSourceAwsAccessKeyArgs.builder()
.accessKeyId(var_.aws_access_key())
.secretAccessKey(var_.aws_secret_key())
.build())
.build())
.gcsDataSink(TransferJobTransferSpecGcsDataSinkArgs.builder()
.bucketName(s3_backup_bucketBucket.name())
.path("foo/bar/")
.build())
.build())
.schedule(TransferJobScheduleArgs.builder()
.scheduleStartDate(TransferJobScheduleScheduleStartDateArgs.builder()
.year(2018)
.month(10)
.day(1)
.build())
.scheduleEndDate(TransferJobScheduleScheduleEndDateArgs.builder()
.year(2019)
.month(1)
.day(15)
.build())
.startTimeOfDay(TransferJobScheduleStartTimeOfDayArgs.builder()
.hours(23)
.minutes(30)
.seconds(0)
.nanos(0)
.build())
.repeatInterval("604800s")
.build())
.notificationConfig(TransferJobNotificationConfigArgs.builder()
.pubsubTopic(topic.id())
.eventTypes(
"TRANSFER_OPERATION_SUCCESS",
"TRANSFER_OPERATION_FAILED")
.payloadFormat("JSON")
.build())
.build(), CustomResourceOptions.builder()
.dependsOn(
s3_backup_bucketBucketIAMMember,
notificationConfig)
.build());
}
}
Python
import pulumi
import pulumi_gcp as gcp
default = gcp.storage.get_transfer_project_servie_account(project=var["project"])
s3_backup_bucket_bucket = gcp.storage.Bucket("s3-backup-bucketBucket",
storage_class="NEARLINE",
project=var["project"],
location="US")
s3_backup_bucket_bucket_iam_member = gcp.storage.BucketIAMMember("s3-backup-bucketBucketIAMMember",
bucket=s3_backup_bucket_bucket.name,
role="roles/storage.admin",
member=f"serviceAccount:{default.email}",
opts=pulumi.ResourceOptions(depends_on=[s3_backup_bucket_bucket]))
topic = gcp.pubsub.Topic("topic")
notification_config = gcp.pubsub.TopicIAMMember("notificationConfig",
topic=topic.id,
role="roles/pubsub.publisher",
member=f"serviceAccount:{default.email}")
s3_bucket_nightly_backup = gcp.storage.TransferJob("s3-bucket-nightly-backup",
description="Nightly backup of S3 bucket",
project=var["project"],
transfer_spec=gcp.storage.TransferJobTransferSpecArgs(
object_conditions=gcp.storage.TransferJobTransferSpecObjectConditionsArgs(
max_time_elapsed_since_last_modification="600s",
exclude_prefixes=["requests.gz"],
),
transfer_options=gcp.storage.TransferJobTransferSpecTransferOptionsArgs(
delete_objects_unique_in_sink=False,
),
aws_s3_data_source=gcp.storage.TransferJobTransferSpecAwsS3DataSourceArgs(
bucket_name=var["aws_s3_bucket"],
aws_access_key=gcp.storage.TransferJobTransferSpecAwsS3DataSourceAwsAccessKeyArgs(
access_key_id=var["aws_access_key"],
secret_access_key=var["aws_secret_key"],
),
),
gcs_data_sink=gcp.storage.TransferJobTransferSpecGcsDataSinkArgs(
bucket_name=s3_backup_bucket_bucket.name,
path="foo/bar/",
),
),
schedule=gcp.storage.TransferJobScheduleArgs(
schedule_start_date=gcp.storage.TransferJobScheduleScheduleStartDateArgs(
year=2018,
month=10,
day=1,
),
schedule_end_date=gcp.storage.TransferJobScheduleScheduleEndDateArgs(
year=2019,
month=1,
day=15,
),
start_time_of_day=gcp.storage.TransferJobScheduleStartTimeOfDayArgs(
hours=23,
minutes=30,
seconds=0,
nanos=0,
),
repeat_interval="604800s",
),
notification_config=gcp.storage.TransferJobNotificationConfigArgs(
pubsub_topic=topic.id,
event_types=[
"TRANSFER_OPERATION_SUCCESS",
"TRANSFER_OPERATION_FAILED",
],
payload_format="JSON",
),
opts=pulumi.ResourceOptions(depends_on=[
s3_backup_bucket_bucket_iam_member,
notification_config,
]))
TypeScript
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const _default = gcp.storage.getTransferProjectServieAccount({
project: _var.project,
});
const s3_backup_bucketBucket = new gcp.storage.Bucket("s3-backup-bucketBucket", {
storageClass: "NEARLINE",
project: _var.project,
location: "US",
});
const s3_backup_bucketBucketIAMMember = new gcp.storage.BucketIAMMember("s3-backup-bucketBucketIAMMember", {
bucket: s3_backup_bucketBucket.name,
role: "roles/storage.admin",
member: _default.then(_default => `serviceAccount:${_default.email}`),
}, {
dependsOn: [s3_backup_bucketBucket],
});
const topic = new gcp.pubsub.Topic("topic", {});
const notificationConfig = new gcp.pubsub.TopicIAMMember("notificationConfig", {
topic: topic.id,
role: "roles/pubsub.publisher",
member: _default.then(_default => `serviceAccount:${_default.email}`),
});
const s3_bucket_nightly_backup = new gcp.storage.TransferJob("s3-bucket-nightly-backup", {
description: "Nightly backup of S3 bucket",
project: _var.project,
transferSpec: {
objectConditions: {
maxTimeElapsedSinceLastModification: "600s",
excludePrefixes: ["requests.gz"],
},
transferOptions: {
deleteObjectsUniqueInSink: false,
},
awsS3DataSource: {
bucketName: _var.aws_s3_bucket,
awsAccessKey: {
accessKeyId: _var.aws_access_key,
secretAccessKey: _var.aws_secret_key,
},
},
gcsDataSink: {
bucketName: s3_backup_bucketBucket.name,
path: "foo/bar/",
},
},
schedule: {
scheduleStartDate: {
year: 2018,
month: 10,
day: 1,
},
scheduleEndDate: {
year: 2019,
month: 1,
day: 15,
},
startTimeOfDay: {
hours: 23,
minutes: 30,
seconds: 0,
nanos: 0,
},
repeatInterval: "604800s",
},
notificationConfig: {
pubsubTopic: topic.id,
eventTypes: [
"TRANSFER_OPERATION_SUCCESS",
"TRANSFER_OPERATION_FAILED",
],
payloadFormat: "JSON",
},
}, {
dependsOn: [
s3_backup_bucketBucketIAMMember,
notificationConfig,
],
});
YAML
resources:
  s3-backup-bucketBucket:
    type: gcp:storage:Bucket
    properties:
      storageClass: NEARLINE
      project: ${var.project}
      location: US
  s3-backup-bucketBucketIAMMember:
    type: gcp:storage:BucketIAMMember
    properties:
      bucket: ${["s3-backup-bucketBucket"].name}
      role: roles/storage.admin
      member: serviceAccount:${default.email}
    options:
      dependsOn:
        - ${["s3-backup-bucketBucket"]}
  topic:
    type: gcp:pubsub:Topic
  notificationConfig:
    type: gcp:pubsub:TopicIAMMember
    properties:
      topic: ${topic.id}
      role: roles/pubsub.publisher
      member: serviceAccount:${default.email}
  s3-bucket-nightly-backup:
    type: gcp:storage:TransferJob
    properties:
      description: Nightly backup of S3 bucket
      project: ${var.project}
      transferSpec:
        objectConditions:
          maxTimeElapsedSinceLastModification: 600s
          excludePrefixes:
            - requests.gz
        transferOptions:
          deleteObjectsUniqueInSink: false
        awsS3DataSource:
          bucketName: ${var.aws_s3_bucket}
          awsAccessKey:
            accessKeyId: ${var.aws_access_key}
            secretAccessKey: ${var.aws_secret_key}
        gcsDataSink:
          bucketName: ${["s3-backup-bucketBucket"].name}
          path: foo/bar/
      schedule:
        scheduleStartDate:
          year: 2018
          month: 10
          day: 1
        scheduleEndDate:
          year: 2019
          month: 1
          day: 15
        startTimeOfDay:
          hours: 23
          minutes: 30
          seconds: 0
          nanos: 0
        repeatInterval: 604800s
      notificationConfig:
        pubsubTopic: ${topic.id}
        eventTypes:
          - TRANSFER_OPERATION_SUCCESS
          - TRANSFER_OPERATION_FAILED
        payloadFormat: JSON
    options:
      dependsOn:
        - ${["s3-backup-bucketBucketIAMMember"]}
        - ${notificationConfig}
variables:
  default:
    fn::invoke:
      Function: gcp:storage:getTransferProjectServieAccount
      Arguments:
        project: ${var.project}
Create TransferJob Resource
new TransferJob(name: string, args: TransferJobArgs, opts?: CustomResourceOptions);
@overload
def TransferJob(resource_name: str,
opts: Optional[ResourceOptions] = None,
description: Optional[str] = None,
notification_config: Optional[TransferJobNotificationConfigArgs] = None,
project: Optional[str] = None,
schedule: Optional[TransferJobScheduleArgs] = None,
status: Optional[str] = None,
transfer_spec: Optional[TransferJobTransferSpecArgs] = None)
@overload
def TransferJob(resource_name: str,
args: TransferJobArgs,
opts: Optional[ResourceOptions] = None)
func NewTransferJob(ctx *Context, name string, args TransferJobArgs, opts ...ResourceOption) (*TransferJob, error)
public TransferJob(string name, TransferJobArgs args, CustomResourceOptions? opts = null)
public TransferJob(String name, TransferJobArgs args)
public TransferJob(String name, TransferJobArgs args, CustomResourceOptions options)
type: gcp:storage:TransferJob
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args TransferJobArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args TransferJobArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args TransferJobArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args TransferJobArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args TransferJobArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
TransferJob Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
The TransferJob resource accepts the following input properties:
The same inputs are accepted by every SDK; they are listed once below, with each language following its own casing conventions (for example TransferSpec in .NET and Go, transferSpec in TypeScript and Java, transfer_spec in Python).
- Description string
  Unique description to identify the Transfer Job.
- TransferSpec TransferJobTransferSpecArgs
  Transfer specification. Structure documented below.
- NotificationConfig TransferJobNotificationConfigArgs
  Notification configuration. This is not supported for transfers involving PosixFilesystem. Structure documented below.
- Project string
  The project in which the resource belongs. If it is not provided, the provider project is used.
- Schedule TransferJobScheduleArgs
  Schedule specification defining when the Transfer Job should be scheduled to start, end and what time to run. Structure documented below.
- Status string
  Status of the job. Default: ENABLED. NOTE: The effect of the new job status takes place during a subsequent job run. For example, if you change the job status from ENABLED to DISABLED, and an operation spawned by the transfer is running, the status change would not affect the current operation.
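As a smaller illustration of these inputs, here is a hedged Python sketch of a GCS-to-GCS job created in the DISABLED state. The two bucket names are placeholders rather than resources defined on this page, and the transfer service account still needs access to both buckets, as in the example above.
import pulumi
import pulumi_gcp as gcp

# Sketch only: copy objects between two existing GCS buckets.
# "source-bucket-name" and "sink-bucket-name" are hypothetical.
# With no repeat_interval set, the job recurs daily (24-hour default).
gcs_copy_job = gcp.storage.TransferJob("gcs-copy-job",
    description="Copy objects between two GCS buckets",
    status="DISABLED",  # create disabled; flip to ENABLED when ready
    transfer_spec=gcp.storage.TransferJobTransferSpecArgs(
        gcs_data_source=gcp.storage.TransferJobTransferSpecGcsDataSourceArgs(
            bucket_name="source-bucket-name",
        ),
        gcs_data_sink=gcp.storage.TransferJobTransferSpecGcsDataSinkArgs(
            bucket_name="sink-bucket-name",
        ),
    ),
    schedule=gcp.storage.TransferJobScheduleArgs(
        schedule_start_date=gcp.storage.TransferJobScheduleScheduleStartDateArgs(
            year=2023,
            month=1,
            day=1,
        ),
    ))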
Outputs
All input properties are implicitly available as output properties. Additionally, the TransferJob resource produces the following output properties:
- CreationTime string
  When the Transfer Job was created.
- DeletionTime string
  When the Transfer Job was deleted.
- Id string
  The provider-assigned unique ID for this managed resource.
- LastModificationTime string
  When the Transfer Job was last modified.
- Name string
  The name of the Transfer Job.
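These outputs can be exported like any other Pulumi outputs. A short Python sketch, reusing the s3_bucket_nightly_backup variable (and the pulumi import) from the Python example above:
# Export the server-generated job name and creation timestamp.
pulumi.export("transfer_job_name", s3_bucket_nightly_backup.name)
pulumi.export("transfer_job_created", s3_bucket_nightly_backup.creation_time)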
Look up Existing TransferJob Resource
Get an existing TransferJob resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: TransferJobState, opts?: CustomResourceOptions): TransferJob
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
creation_time: Optional[str] = None,
deletion_time: Optional[str] = None,
description: Optional[str] = None,
last_modification_time: Optional[str] = None,
name: Optional[str] = None,
notification_config: Optional[TransferJobNotificationConfigArgs] = None,
project: Optional[str] = None,
schedule: Optional[TransferJobScheduleArgs] = None,
status: Optional[str] = None,
transfer_spec: Optional[TransferJobTransferSpecArgs] = None) -> TransferJob
func GetTransferJob(ctx *Context, name string, id IDInput, state *TransferJobState, opts ...ResourceOption) (*TransferJob, error)
public static TransferJob Get(string name, Input<string> id, TransferJobState? state, CustomResourceOptions? opts = null)
public static TransferJob get(String name, Output<String> id, TransferJobState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
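An existing job can be re-attached to a stack by its server-generated name. A minimal Python sketch, assuming a hypothetical job ID; the state properties accepted by get are listed after this sketch.
import pulumi
import pulumi_gcp as gcp

# Look up an existing Transfer Job by its server-generated resource name.
# "transferJobs/1234567890" is a hypothetical ID; substitute the real one.
existing_job = gcp.storage.TransferJob.get(
    "imported-nightly-backup",
    "transferJobs/1234567890")

pulumi.export("imported_job_status", existing_job.status)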
- CreationTime string
  When the Transfer Job was created.
- DeletionTime string
  When the Transfer Job was deleted.
- Description string
  Unique description to identify the Transfer Job.
- LastModificationTime string
  When the Transfer Job was last modified.
- Name string
  The name of the Transfer Job.
- NotificationConfig TransferJobNotificationConfigArgs
  Notification configuration. This is not supported for transfers involving PosixFilesystem. Structure documented below.
- Project string
  The project in which the resource belongs. If it is not provided, the provider project is used.
- Schedule TransferJobScheduleArgs
  Schedule specification defining when the Transfer Job should be scheduled to start, end and what time to run. Structure documented below.
- Status string
  Status of the job. Default: ENABLED. NOTE: The effect of the new job status takes place during a subsequent job run. For example, if you change the job status from ENABLED to DISABLED, and an operation spawned by the transfer is running, the status change would not affect the current operation.
- TransferSpec TransferJobTransferSpecArgs
  Transfer specification. Structure documented below.
Supporting Types
TransferJobNotificationConfig
- PayloadFormat string
  The desired format of the notification message payloads. One of "NONE" or "JSON".
- PubsubTopic string
  The Topic.name of the Pub/Sub topic to which to publish notifications. Must be of the format: projects/{project}/topics/{topic}. Not matching this format results in an INVALID_ARGUMENT error.
- EventTypes List<string>
  Event types for which a notification is desired. If empty, send notifications for all event types. The valid types are "TRANSFER_OPERATION_SUCCESS", "TRANSFER_OPERATION_FAILED", "TRANSFER_OPERATION_ABORTED".
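A short Python sketch of a notification block that uses an explicitly qualified topic name and all three documented event types; the project and topic names are hypothetical, and topic.id from an actual gcp.pubsub.Topic also satisfies the required format.
import pulumi_gcp as gcp

# Sketch only: publish all three documented event types to a topic given in
# the projects/{project}/topics/{topic} form required by PubsubTopic.
notification = gcp.storage.TransferJobNotificationConfigArgs(
    pubsub_topic="projects/my-project/topics/transfer-events",
    event_types=[
        "TRANSFER_OPERATION_SUCCESS",
        "TRANSFER_OPERATION_FAILED",
        "TRANSFER_OPERATION_ABORTED",
    ],
    payload_format="JSON",
)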
TransferJobSchedule
- ScheduleStartDate TransferJobScheduleScheduleStartDate
  The first day the recurring transfer is scheduled to run. If schedule_start_date is in the past, the transfer will run for the first time on the following day. Structure documented below.
- RepeatInterval string
  Interval between the start of each scheduled transfer. If unspecified, the default value is 24 hours. This value may not be less than 1 hour. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".
- ScheduleEndDate TransferJobScheduleScheduleEndDate
  The last day the recurring transfer will be run. If schedule_end_date is the same as schedule_start_date, the transfer will be executed only once. Structure documented below.
- StartTimeOfDay TransferJobScheduleStartTimeOfDay
  The time in UTC at which the transfer will be scheduled to start in a day. Transfers may start later than this time. If not specified, recurring and one-time transfers that are scheduled to run today will run immediately; recurring transfers that are scheduled to run on a future date will start at approximately midnight UTC on that date. Note that when configuring a transfer with the Cloud Platform Console, the transfer's start time in a day is specified in your local timezone. Structure documented below.
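Following the description above, setting schedule_end_date equal to schedule_start_date yields a one-time run. A minimal Python sketch with example dates:
import pulumi_gcp as gcp

# Sketch only: this schedule runs exactly once, because schedule_end_date
# equals schedule_start_date. The dates and start time are examples.
one_time_schedule = gcp.storage.TransferJobScheduleArgs(
    schedule_start_date=gcp.storage.TransferJobScheduleScheduleStartDateArgs(
        year=2023, month=6, day=1,
    ),
    schedule_end_date=gcp.storage.TransferJobScheduleScheduleEndDateArgs(
        year=2023, month=6, day=1,
    ),
    start_time_of_day=gcp.storage.TransferJobScheduleStartTimeOfDayArgs(
        hours=2, minutes=0, seconds=0, nanos=0,
    ),
)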
TransferJobScheduleScheduleEndDate
TransferJobScheduleScheduleStartDate
Both date types take integer year, month, and day fields, as shown in the schedule examples above.
TransferJobScheduleStartTimeOfDay
Takes integer hours, minutes, seconds, and nanos fields describing a time of day in UTC, as shown in the schedule examples above.
TransferJobTransferSpec
- AwsS3DataSource TransferJobTransferSpecAwsS3DataSource
  An AWS S3 data source. Structure documented below.
- AzureBlobStorageDataSource TransferJobTransferSpecAzureBlobStorageDataSource
  An Azure Blob Storage data source. Structure documented below.
- GcsDataSink TransferJobTransferSpecGcsDataSink
  A Google Cloud Storage data sink. Structure documented below.
- GcsDataSource TransferJobTransferSpecGcsDataSource
  A Google Cloud Storage data source. Structure documented below.
- HttpDataSource TransferJobTransferSpecHttpDataSource
  An HTTP URL data source. Structure documented below.
- ObjectConditions TransferJobTransferSpecObjectConditions
  Only objects that satisfy these object conditions are included in the set of data source and data sink objects. Object conditions based on objects' last_modification_time do not exclude objects in a data sink. Structure documented below.
- PosixDataSink TransferJobTransferSpecPosixDataSink
  A POSIX data sink. Structure documented below.
- PosixDataSource TransferJobTransferSpecPosixDataSource
  A POSIX filesystem data source. Structure documented below.
- SinkAgentPoolName string
  Specifies the agent pool name associated with the POSIX data sink. When unspecified, the default name is used.
- SourceAgentPoolName string
  Specifies the agent pool name associated with the POSIX data source. When unspecified, the default name is used.
- TransferOptions TransferJobTransferSpecTransferOptions
  Characteristics of how to treat files from the data source and sink during the job. If the option delete_objects_unique_in_sink is true, object conditions based on objects' last_modification_time are ignored and do not exclude objects in a data source or a data sink. Structure documented below.
TransferJobTransferSpecAwsS3DataSource
- bucketName (string): S3 bucket name.
- awsAccessKey (TransferJobTransferSpecAwsS3DataSourceAwsAccessKey): AWS credentials block. Structure documented below.
- roleArn (string): The Amazon Resource Name (ARN) of the role to support temporary credentials via 'AssumeRoleWithWebIdentity'. For more information about ARNs, see IAM ARNs. When a role ARN is provided, Transfer Service fetches temporary credentials for the session using an 'AssumeRoleWithWebIdentity' call for the provided role using the GoogleServiceAccount for this project.
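For illustration, a minimal TypeScript sketch of an S3 source that relies on roleArn rather than static keys, using the Node SDK's input types; the bucket name and role ARN are hypothetical placeholders, not values from this page.

```typescript
import * as gcp from "@pulumi/gcp";

// Sketch: an S3 source authenticated through AssumeRoleWithWebIdentity.
// "my-aws-source-bucket" and the role ARN are hypothetical placeholders.
const s3Source: gcp.types.input.storage.TransferJobTransferSpecAwsS3DataSource = {
    bucketName: "my-aws-source-bucket",
    roleArn: "arn:aws:iam::123456789012:role/gcp-storage-transfer",
};
```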
TransferJobTransferSpecAwsS3DataSourceAwsAccessKey
- accessKeyId (string): AWS Key ID.
- secretAccessKey (string): AWS Secret Access Key.
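If static credentials are used instead of a role ARN, they can be read from stack configuration so the secret never appears in source. A sketch, assuming config keys named awsAccessKeyId and awsSecretAccessKey (the key names are assumptions, not part of the provider schema):

```typescript
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

const config = new pulumi.Config();

// Sketch: static AWS credentials pulled from stack configuration.
const awsAccessKey: gcp.types.input.storage.TransferJobTransferSpecAwsS3DataSourceAwsAccessKey = {
    accessKeyId: config.require("awsAccessKeyId"),
    secretAccessKey: config.requireSecret("awsSecretAccessKey"),
};
```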
TransferJobTransferSpecAzureBlobStorageDataSource
- azureCredentials (TransferJobTransferSpecAzureBlobStorageDataSourceAzureCredentials): Block of credentials used to authenticate API requests to Azure. Structure documented below.
- container (string): The container to transfer from the Azure Storage account.
- storageAccount (string): The name of the Azure Storage account.
- path (string): Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'.
TransferJobTransferSpecAzureBlobStorageDataSourceAzureCredentials
- sasToken (string): Azure shared access signature. See Grant limited access to Azure Storage resources using shared access signatures (SAS).
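A sketch of an Azure Blob Storage source wired to a SAS token kept as a stack secret; the storage account, container, prefix, and config key name below are hypothetical.

```typescript
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

const config = new pulumi.Config();

// Sketch: Azure Blob Storage source authenticated with a SAS token.
// Account, container, and prefix values are hypothetical.
const azureSource: gcp.types.input.storage.TransferJobTransferSpecAzureBlobStorageDataSource = {
    storageAccount: "mystorageaccount",
    container: "backups",
    path: "nightly/", // object prefix; ends with '/' and does not begin with '/'
    azureCredentials: {
        sasToken: config.requireSecret("azureSasToken"),
    },
};
```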
TransferJobTransferSpecGcsDataSink
- bucketName (string): Google Cloud Storage bucket name.
- path (string): Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'.
TransferJobTransferSpecGcsDataSource
- bucketName (string): Google Cloud Storage bucket name.
- path (string): Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should generally not begin with a '/'.
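Putting the two GCS blocks together, a sketch of a bucket-to-bucket copy job; the bucket names, prefixes, and start date are hypothetical placeholders.

```typescript
import * as gcp from "@pulumi/gcp";

// Sketch: copy objects under "raw/" from one GCS bucket into "archive/" in another.
// Bucket names and the schedule are hypothetical.
const gcsToGcs = new gcp.storage.TransferJob("gcs-to-gcs", {
    description: "Copy raw/ objects into the archive bucket",
    transferSpec: {
        gcsDataSource: {
            bucketName: "my-raw-data-bucket",
            path: "raw/",
        },
        gcsDataSink: {
            bucketName: "my-archive-bucket",
            path: "archive/",
        },
    },
    schedule: {
        scheduleStartDate: { year: 2024, month: 1, day: 1 },
    },
});
```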
TransferJobTransferSpecHttpDataSource
- listUrl (string): The URL that points to the file that stores the object list entries. This file must allow public access. Currently, only URLs with HTTP and HTTPS schemes are supported.
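A sketch of a job that pulls the objects named in a publicly readable URL-list file into a GCS bucket; the list URL, bucket name, and start date are hypothetical.

```typescript
import * as gcp from "@pulumi/gcp";

// Sketch: import objects enumerated in a public URL-list file into GCS.
// URL and bucket name are hypothetical placeholders.
const urlListImport = new gcp.storage.TransferJob("url-list-import", {
    description: "Import objects listed in a public URL list",
    transferSpec: {
        httpDataSource: {
            listUrl: "https://example.com/object-list.tsv",
        },
        gcsDataSink: {
            bucketName: "my-import-bucket",
        },
    },
    schedule: {
        scheduleStartDate: { year: 2024, month: 1, day: 1 },
    },
});
```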
TransferJobTransferSpecObjectConditions
- excludePrefixes (string[]): exclude_prefixes must follow the requirements described for include_prefixes. See Requirements.
- includePrefixes (string[]): If include_prefixes is specified, objects that satisfy the object conditions must have names that start with one of the include_prefixes and that do not start with any of the exclude_prefixes. If include_prefixes is not specified, all objects except those that have names starting with one of the exclude_prefixes must satisfy the object conditions. See Requirements.
- maxTimeElapsedSinceLastModification (string): A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".
- minTimeElapsedSinceLastModification (string): A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".
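A sketch of an object-conditions block that limits a run to recently modified objects under a single prefix while skipping one known path; all prefix values are hypothetical. Note that each exclude prefix here also starts with an include prefix, as the Requirements demand.

```typescript
import * as gcp from "@pulumi/gcp";

// Sketch: only transfer objects under "logs/2024/" modified in the last hour,
// excluding one known object. Prefix values are hypothetical.
const conditions: gcp.types.input.storage.TransferJobTransferSpecObjectConditions = {
    includePrefixes: ["logs/2024/"],
    excludePrefixes: ["logs/2024/requests.gz"],
    maxTimeElapsedSinceLastModification: "3600s",
};
```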
TransferJobTransferSpecPosixDataSink
- rootDirectory (string): Root directory path to the filesystem.
TransferJobTransferSpecPosixDataSource
- rootDirectory (string): Root directory path to the filesystem.
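A sketch of a POSIX-to-GCS job that runs through a self-managed agent pool; the directory, pool name, and bucket are hypothetical (agent pool names normally follow the projects/{project}/agentPools/{pool} form).

```typescript
import * as gcp from "@pulumi/gcp";

// Sketch: upload a local filesystem tree to GCS via transfer agents.
// Paths, pool name, and bucket are hypothetical placeholders.
const posixUpload = new gcp.storage.TransferJob("posix-upload", {
    description: "Upload /mnt/data to GCS via a transfer agent pool",
    transferSpec: {
        posixDataSource: {
            rootDirectory: "/mnt/data",
        },
        gcsDataSink: {
            bucketName: "my-landing-bucket",
        },
        sourceAgentPoolName: "projects/my-project/agentPools/my-source-pool",
    },
    schedule: {
        scheduleStartDate: { year: 2024, month: 1, day: 1 },
    },
});
```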
TransferJobTransferSpecTransferOptions
- deleteObjectsFromSourceAfterTransfer (boolean): Whether objects should be deleted from the source after they are transferred to the sink. Note that this option and delete_objects_unique_in_sink are mutually exclusive.
- deleteObjectsUniqueInSink (boolean): Whether objects that exist only in the sink should be deleted. Note that this option and delete_objects_from_source_after_transfer are mutually exclusive.
- overwriteObjectsAlreadyExistingInSink (boolean): Whether overwriting objects that already exist in the sink is allowed.
- overwriteWhen (string): When to overwrite objects that already exist in the sink. If not set, overwrite behavior is determined by overwrite_objects_already_existing_in_sink. Possible values: ALWAYS, DIFFERENT, NEVER.
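A sketch of a transfer-options block for a mirror-style job: rewrite a sink object only when its content differs from the source copy, and prune objects that exist only in the sink. It deliberately leaves the mutually exclusive deleteObjectsFromSourceAfterTransfer unset.

```typescript
import * as gcp from "@pulumi/gcp";

// Sketch: mirror-style options for a recurring transfer job.
const mirrorOptions: gcp.types.input.storage.TransferJobTransferSpecTransferOptions = {
    // Rewrite a sink object only when its content differs from the source copy.
    overwriteWhen: "DIFFERENT",
    // Remove sink objects that no longer exist in the source
    // (mutually exclusive with deleteObjectsFromSourceAfterTransfer).
    deleteObjectsUniqueInSink: true,
};
```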
Import
Transfer Jobs can be imported using the Transfer Job's project and name, without the transferJob/ prefix, e.g.
$ pulumi import gcp:storage/transferJob:TransferJob nightly-backup-transfer-job my-project-1asd32/8422144862922355674
Package Details
- Repository: Google Cloud (GCP) Classic pulumi/pulumi-gcp
- License: Apache-2.0
- Notes: This Pulumi package is based on the google-beta Terraform Provider.