mongodbatlas.CloudBackupSnapshotExportJob

MongoDB Atlas v3.14.3 published on Monday, Apr 1, 2024 by Pulumi

    The mongodbatlas.CloudBackupSnapshotExportJob resource allows you to create a cloud backup snapshot export job for the specified project.

    NOTE: Groups and projects are synonymous terms. You may find groupId in the official documentation.

    Example Usage

    Export one snapshot

    import * as pulumi from "@pulumi/pulumi";
    import * as mongodbatlas from "@pulumi/mongodbatlas";
    
    const testCloudBackupSnapshotExportBucket = new mongodbatlas.CloudBackupSnapshotExportBucket("testCloudBackupSnapshotExportBucket", {
        projectId: "{PROJECT_ID}",
        iamRoleId: "{IAM_ROLE_ID}",
        bucketName: "example_bucket",
        cloudProvider: "AWS",
    });
    const testCloudBackupSnapshotExportJob = new mongodbatlas.CloudBackupSnapshotExportJob("testCloudBackupSnapshotExportJob", {
        projectId: "{PROJECT_ID}",
        clusterName: "{CLUSTER_NAME}",
        snapshotId: "{SNAPSHOT_ID}",
        exportBucketId: testCloudBackupSnapshotExportBucket.exportBucketId,
        customDatas: [{
            key: "exported by",
            value: "myName",
        }],
    });
    
    import pulumi
    import pulumi_mongodbatlas as mongodbatlas
    
    test_cloud_backup_snapshot_export_bucket = mongodbatlas.CloudBackupSnapshotExportBucket("testCloudBackupSnapshotExportBucket",
        project_id="{PROJECT_ID}",
        iam_role_id="{IAM_ROLE_ID}",
        bucket_name="example_bucket",
        cloud_provider="AWS")
    test_cloud_backup_snapshot_export_job = mongodbatlas.CloudBackupSnapshotExportJob("testCloudBackupSnapshotExportJob",
        project_id="{PROJECT_ID}",
        cluster_name="{CLUSTER_NAME}",
        snapshot_id="{SNAPSHOT_ID}",
        export_bucket_id=test_cloud_backup_snapshot_export_bucket.export_bucket_id,
        custom_datas=[mongodbatlas.CloudBackupSnapshotExportJobCustomDataArgs(
            key="exported by",
            value="myName",
        )])
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-mongodbatlas/sdk/v3/go/mongodbatlas"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		testCloudBackupSnapshotExportBucket, err := mongodbatlas.NewCloudBackupSnapshotExportBucket(ctx, "testCloudBackupSnapshotExportBucket", &mongodbatlas.CloudBackupSnapshotExportBucketArgs{
    			ProjectId:     pulumi.String("{PROJECT_ID}"),
    			IamRoleId:     pulumi.String("{IAM_ROLE_ID}"),
    			BucketName:    pulumi.String("example_bucket"),
    			CloudProvider: pulumi.String("AWS"),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = mongodbatlas.NewCloudBackupSnapshotExportJob(ctx, "testCloudBackupSnapshotExportJob", &mongodbatlas.CloudBackupSnapshotExportJobArgs{
    			ProjectId:      pulumi.String("{PROJECT_ID}"),
    			ClusterName:    pulumi.String("{CLUSTER_NAME}"),
    			SnapshotId:     pulumi.String("{SNAPSHOT_ID}"),
    			ExportBucketId: testCloudBackupSnapshotExportBucket.ExportBucketId,
    			CustomDatas: mongodbatlas.CloudBackupSnapshotExportJobCustomDataArray{
    				&mongodbatlas.CloudBackupSnapshotExportJobCustomDataArgs{
    					Key:   pulumi.String("exported by"),
    					Value: pulumi.String("myName"),
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Mongodbatlas = Pulumi.Mongodbatlas;
    
    return await Deployment.RunAsync(() => 
    {
        var testCloudBackupSnapshotExportBucket = new Mongodbatlas.CloudBackupSnapshotExportBucket("testCloudBackupSnapshotExportBucket", new()
        {
            ProjectId = "{PROJECT_ID}",
            IamRoleId = "{IAM_ROLE_ID}",
            BucketName = "example_bucket",
            CloudProvider = "AWS",
        });
    
        var testCloudBackupSnapshotExportJob = new Mongodbatlas.CloudBackupSnapshotExportJob("testCloudBackupSnapshotExportJob", new()
        {
            ProjectId = "{PROJECT_ID}",
            ClusterName = "{CLUSTER_NAME}",
            SnapshotId = "{SNAPSHOT_ID}",
            ExportBucketId = testCloudBackupSnapshotExportBucket.ExportBucketId,
            CustomDatas = new[]
            {
                new Mongodbatlas.Inputs.CloudBackupSnapshotExportJobCustomDataArgs
                {
                    Key = "exported by",
                    Value = "myName",
                },
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.mongodbatlas.CloudBackupSnapshotExportBucket;
    import com.pulumi.mongodbatlas.CloudBackupSnapshotExportBucketArgs;
    import com.pulumi.mongodbatlas.CloudBackupSnapshotExportJob;
    import com.pulumi.mongodbatlas.CloudBackupSnapshotExportJobArgs;
    import com.pulumi.mongodbatlas.inputs.CloudBackupSnapshotExportJobCustomDataArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var testCloudBackupSnapshotExportBucket = new CloudBackupSnapshotExportBucket("testCloudBackupSnapshotExportBucket", CloudBackupSnapshotExportBucketArgs.builder()        
                .projectId("{PROJECT_ID}")
                .iamRoleId("{IAM_ROLE_ID}")
                .bucketName("example_bucket")
                .cloudProvider("AWS")
                .build());
    
            var testCloudBackupSnapshotExportJob = new CloudBackupSnapshotExportJob("testCloudBackupSnapshotExportJob", CloudBackupSnapshotExportJobArgs.builder()        
                .projectId("{PROJECT_ID}")
                .clusterName("{CLUSTER_NAME}")
                .snapshotId("{SNAPSHOT_ID}")
                .exportBucketId(testCloudBackupSnapshotExportBucket.exportBucketId())
                .customDatas(CloudBackupSnapshotExportJobCustomDataArgs.builder()
                    .key("exported by")
                    .value("myName")
                    .build())
                .build());
    
        }
    }
    
    resources:
      testCloudBackupSnapshotExportBucket:
        type: mongodbatlas:CloudBackupSnapshotExportBucket
        properties:
          projectId: '{PROJECT_ID}'
          iamRoleId: '{IAM_ROLE_ID}'
          bucketName: example_bucket
          cloudProvider: AWS
      testCloudBackupSnapshotExportJob:
        type: mongodbatlas:CloudBackupSnapshotExportJob
        properties:
          projectId: '{PROJECT_ID}'
          clusterName: '{CLUSTER_NAME}'
          snapshotId: '{SNAPSHOT_ID}'
          exportBucketId: ${testCloudBackupSnapshotExportBucket.exportBucketId}
          customDatas:
            - key: exported by
              value: myName
    

    Create backup and automatic snapshot export policies

    import * as pulumi from "@pulumi/pulumi";
    import * as mongodbatlas from "@pulumi/mongodbatlas";
    
    const _export = new mongodbatlas.CloudBackupSnapshotExportBucket("export", {
        projectId: "{PROJECT_ID}",
        iamRoleId: "{IAM_ROLE_ID}",
        bucketName: "example_bucket",
        cloudProvider: "AWS",
    });
    const backup = new mongodbatlas.CloudBackupSchedule("backup", {
        projectId: "{PROJECT_ID}",
        clusterName: "{CLUSTER_NAME}",
        autoExportEnabled: true,
        "export": {
            exportBucketId: _export.exportBucketId,
            frequencyType: "daily",
        },
        useOrgAndGroupNamesInExportPrefix: true,
        referenceHourOfDay: 7,
        referenceMinuteOfHour: 0,
        restoreWindowDays: 5,
        policyItemHourly: {
            frequencyInterval: 6,
            retentionUnit: "days",
            retentionValue: 7,
        },
        policyItemDaily: {
            frequencyInterval: 1,
            retentionUnit: "days",
            retentionValue: 7,
        },
        policyItemWeeklies: [{
            frequencyInterval: 6,
            retentionUnit: "weeks",
            retentionValue: 4,
        }],
        policyItemMonthlies: [{
            frequencyInterval: 28,
            retentionUnit: "months",
            retentionValue: 12,
        }],
    });
    
    import pulumi
    import pulumi_mongodbatlas as mongodbatlas
    
    export = mongodbatlas.CloudBackupSnapshotExportBucket("export",
        project_id="{PROJECT_ID}",
        iam_role_id="{IAM_ROLE_ID}",
        bucket_name="example_bucket",
        cloud_provider="AWS")
    backup = mongodbatlas.CloudBackupSchedule("backup",
        project_id="{PROJECT_ID}",
        cluster_name="{CLUSTER_NAME}",
        auto_export_enabled=True,
        export=mongodbatlas.CloudBackupScheduleExportArgs(
            export_bucket_id=export.export_bucket_id,
            frequency_type="daily",
        ),
        use_org_and_group_names_in_export_prefix=True,
        reference_hour_of_day=7,
        reference_minute_of_hour=0,
        restore_window_days=5,
        policy_item_hourly=mongodbatlas.CloudBackupSchedulePolicyItemHourlyArgs(
            frequency_interval=6,
            retention_unit="days",
            retention_value=7,
        ),
        policy_item_daily=mongodbatlas.CloudBackupSchedulePolicyItemDailyArgs(
            frequency_interval=1,
            retention_unit="days",
            retention_value=7,
        ),
        policy_item_weeklies=[mongodbatlas.CloudBackupSchedulePolicyItemWeeklyArgs(
            frequency_interval=6,
            retention_unit="weeks",
            retention_value=4,
        )],
        policy_item_monthlies=[mongodbatlas.CloudBackupSchedulePolicyItemMonthlyArgs(
            frequency_interval=28,
            retention_unit="months",
            retention_value=12,
        )])
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-mongodbatlas/sdk/v3/go/mongodbatlas"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		export, err := mongodbatlas.NewCloudBackupSnapshotExportBucket(ctx, "export", &mongodbatlas.CloudBackupSnapshotExportBucketArgs{
    			ProjectId:     pulumi.String("{PROJECT_ID}"),
    			IamRoleId:     pulumi.String("{IAM_ROLE_ID}"),
    			BucketName:    pulumi.String("example_bucket"),
    			CloudProvider: pulumi.String("AWS"),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = mongodbatlas.NewCloudBackupSchedule(ctx, "backup", &mongodbatlas.CloudBackupScheduleArgs{
    			ProjectId:         pulumi.String("{PROJECT_ID}"),
    			ClusterName:       pulumi.String("{CLUSTER_NAME}"),
    			AutoExportEnabled: pulumi.Bool(true),
    			Export: &mongodbatlas.CloudBackupScheduleExportArgs{
    				ExportBucketId: export.ExportBucketId,
    				FrequencyType:  pulumi.String("daily"),
    			},
    			UseOrgAndGroupNamesInExportPrefix: pulumi.Bool(true),
    			ReferenceHourOfDay:                pulumi.Int(7),
    			ReferenceMinuteOfHour:             pulumi.Int(0),
    			RestoreWindowDays:                 pulumi.Int(5),
    			PolicyItemHourly: &mongodbatlas.CloudBackupSchedulePolicyItemHourlyArgs{
    				FrequencyInterval: pulumi.Int(6),
    				RetentionUnit:     pulumi.String("days"),
    				RetentionValue:    pulumi.Int(7),
    			},
    			PolicyItemDaily: &mongodbatlas.CloudBackupSchedulePolicyItemDailyArgs{
    				FrequencyInterval: pulumi.Int(1),
    				RetentionUnit:     pulumi.String("days"),
    				RetentionValue:    pulumi.Int(7),
    			},
    			PolicyItemWeeklies: mongodbatlas.CloudBackupSchedulePolicyItemWeeklyArray{
    				&mongodbatlas.CloudBackupSchedulePolicyItemWeeklyArgs{
    					FrequencyInterval: pulumi.Int(6),
    					RetentionUnit:     pulumi.String("weeks"),
    					RetentionValue:    pulumi.Int(4),
    				},
    			},
    			PolicyItemMonthlies: mongodbatlas.CloudBackupSchedulePolicyItemMonthlyArray{
    				&mongodbatlas.CloudBackupSchedulePolicyItemMonthlyArgs{
    					FrequencyInterval: pulumi.Int(28),
    					RetentionUnit:     pulumi.String("months"),
    					RetentionValue:    pulumi.Int(12),
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Mongodbatlas = Pulumi.Mongodbatlas;
    
    return await Deployment.RunAsync(() => 
    {
        var export = new Mongodbatlas.CloudBackupSnapshotExportBucket("export", new()
        {
            ProjectId = "{PROJECT_ID}",
            IamRoleId = "{IAM_ROLE_ID}",
            BucketName = "example_bucket",
            CloudProvider = "AWS",
        });
    
        var backup = new Mongodbatlas.CloudBackupSchedule("backup", new()
        {
            ProjectId = "{PROJECT_ID}",
            ClusterName = "{CLUSTER_NAME}",
            AutoExportEnabled = true,
            Export = new Mongodbatlas.Inputs.CloudBackupScheduleExportArgs
            {
                ExportBucketId = export.ExportBucketId,
                FrequencyType = "daily",
            },
            UseOrgAndGroupNamesInExportPrefix = true,
            ReferenceHourOfDay = 7,
            ReferenceMinuteOfHour = 0,
            RestoreWindowDays = 5,
            PolicyItemHourly = new Mongodbatlas.Inputs.CloudBackupSchedulePolicyItemHourlyArgs
            {
                FrequencyInterval = 6,
                RetentionUnit = "days",
                RetentionValue = 7,
            },
            PolicyItemDaily = new Mongodbatlas.Inputs.CloudBackupSchedulePolicyItemDailyArgs
            {
                FrequencyInterval = 1,
                RetentionUnit = "days",
                RetentionValue = 7,
            },
            PolicyItemWeeklies = new[]
            {
                new Mongodbatlas.Inputs.CloudBackupSchedulePolicyItemWeeklyArgs
                {
                    FrequencyInterval = 6,
                    RetentionUnit = "weeks",
                    RetentionValue = 4,
                },
            },
            PolicyItemMonthlies = new[]
            {
                new Mongodbatlas.Inputs.CloudBackupSchedulePolicyItemMonthlyArgs
                {
                    FrequencyInterval = 28,
                    RetentionUnit = "months",
                    RetentionValue = 12,
                },
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.mongodbatlas.CloudBackupSnapshotExportBucket;
    import com.pulumi.mongodbatlas.CloudBackupSnapshotExportBucketArgs;
    import com.pulumi.mongodbatlas.CloudBackupSchedule;
    import com.pulumi.mongodbatlas.CloudBackupScheduleArgs;
    import com.pulumi.mongodbatlas.inputs.CloudBackupScheduleExportArgs;
    import com.pulumi.mongodbatlas.inputs.CloudBackupSchedulePolicyItemHourlyArgs;
    import com.pulumi.mongodbatlas.inputs.CloudBackupSchedulePolicyItemDailyArgs;
    import com.pulumi.mongodbatlas.inputs.CloudBackupSchedulePolicyItemWeeklyArgs;
    import com.pulumi.mongodbatlas.inputs.CloudBackupSchedulePolicyItemMonthlyArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var export = new CloudBackupSnapshotExportBucket("export", CloudBackupSnapshotExportBucketArgs.builder()        
                .projectId("{PROJECT_ID}")
                .iamRoleId("{IAM_ROLE_ID}")
                .bucketName("example_bucket")
                .cloudProvider("AWS")
                .build());
    
            var backup = new CloudBackupSchedule("backup", CloudBackupScheduleArgs.builder()        
                .projectId("{PROJECT_ID}")
                .clusterName("{CLUSTER_NAME}")
                .autoExportEnabled(true)
                .export(CloudBackupScheduleExportArgs.builder()
                    .exportBucketId(export.exportBucketId())
                    .frequencyType("daily")
                    .build())
                .useOrgAndGroupNamesInExportPrefix(true)
                .referenceHourOfDay(7)
                .referenceMinuteOfHour(0)
                .restoreWindowDays(5)
                .policyItemHourly(CloudBackupSchedulePolicyItemHourlyArgs.builder()
                    .frequencyInterval(6)
                    .retentionUnit("days")
                    .retentionValue(7)
                    .build())
                .policyItemDaily(CloudBackupSchedulePolicyItemDailyArgs.builder()
                    .frequencyInterval(1)
                    .retentionUnit("days")
                    .retentionValue(7)
                    .build())
                .policyItemWeeklies(CloudBackupSchedulePolicyItemWeeklyArgs.builder()
                    .frequencyInterval(6)
                    .retentionUnit("weeks")
                    .retentionValue(4)
                    .build())
                .policyItemMonthlies(CloudBackupSchedulePolicyItemMonthlyArgs.builder()
                    .frequencyInterval(28)
                    .retentionUnit("months")
                    .retentionValue(12)
                    .build())
                .build());
    
        }
    }
    
    resources:
      export:
        type: mongodbatlas:CloudBackupSnapshotExportBucket
        properties:
          projectId: '{PROJECT_ID}'
          iamRoleId: '{IAM_ROLE_ID}'
          bucketName: example_bucket
          cloudProvider: AWS
      backup:
        type: mongodbatlas:CloudBackupSchedule
        properties:
          projectId: '{PROJECT_ID}'
          clusterName: '{CLUSTER_NAME}'
          autoExportEnabled: true
          export:
            exportBucketId: ${export.exportBucketId}
            frequencyType: daily
          useOrgAndGroupNamesInExportPrefix: true
          referenceHourOfDay: 7
          referenceMinuteOfHour: 0
          restoreWindowDays: 5
          policyItemHourly:
            frequencyInterval: 6
            retentionUnit: days
            retentionValue: 7
          policyItemDaily:
            frequencyInterval: 1
            retentionUnit: days
            retentionValue: 7
          policyItemWeeklies:
            - frequencyInterval: 6
              retentionUnit: weeks
              retentionValue: 4
          policyItemMonthlies:
            - frequencyInterval: 28
              retentionUnit: months
              retentionValue: 12
    

    Create CloudBackupSnapshotExportJob Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new CloudBackupSnapshotExportJob(name: string, args: CloudBackupSnapshotExportJobArgs, opts?: CustomResourceOptions);
    @overload
    def CloudBackupSnapshotExportJob(resource_name: str,
                                     args: CloudBackupSnapshotExportJobArgs,
                                     opts: Optional[ResourceOptions] = None)
    
    @overload
    def CloudBackupSnapshotExportJob(resource_name: str,
                                     opts: Optional[ResourceOptions] = None,
                                     cluster_name: Optional[str] = None,
                                     custom_datas: Optional[Sequence[CloudBackupSnapshotExportJobCustomDataArgs]] = None,
                                     export_bucket_id: Optional[str] = None,
                                     project_id: Optional[str] = None,
                                     snapshot_id: Optional[str] = None)
    func NewCloudBackupSnapshotExportJob(ctx *Context, name string, args CloudBackupSnapshotExportJobArgs, opts ...ResourceOption) (*CloudBackupSnapshotExportJob, error)
    public CloudBackupSnapshotExportJob(string name, CloudBackupSnapshotExportJobArgs args, CustomResourceOptions? opts = null)
    public CloudBackupSnapshotExportJob(String name, CloudBackupSnapshotExportJobArgs args)
    public CloudBackupSnapshotExportJob(String name, CloudBackupSnapshotExportJobArgs args, CustomResourceOptions options)
    
    type: mongodbatlas:CloudBackupSnapshotExportJob
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args CloudBackupSnapshotExportJobArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args CloudBackupSnapshotExportJobArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args CloudBackupSnapshotExportJobArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args CloudBackupSnapshotExportJobArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args CloudBackupSnapshotExportJobArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Example

    The following reference example uses placeholder values for all input properties.

    var cloudBackupSnapshotExportJobResource = new Mongodbatlas.CloudBackupSnapshotExportJob("cloudBackupSnapshotExportJobResource", new()
    {
        ClusterName = "string",
        CustomDatas = new[]
        {
            new Mongodbatlas.Inputs.CloudBackupSnapshotExportJobCustomDataArgs
            {
                Key = "string",
                Value = "string",
            },
        },
        ExportBucketId = "string",
        ProjectId = "string",
        SnapshotId = "string",
    });
    
    example, err := mongodbatlas.NewCloudBackupSnapshotExportJob(ctx, "cloudBackupSnapshotExportJobResource", &mongodbatlas.CloudBackupSnapshotExportJobArgs{
    	ClusterName: pulumi.String("string"),
    	CustomDatas: mongodbatlas.CloudBackupSnapshotExportJobCustomDataArray{
    		&mongodbatlas.CloudBackupSnapshotExportJobCustomDataArgs{
    			Key:   pulumi.String("string"),
    			Value: pulumi.String("string"),
    		},
    	},
    	ExportBucketId: pulumi.String("string"),
    	ProjectId:      pulumi.String("string"),
    	SnapshotId:     pulumi.String("string"),
    })
    
    var cloudBackupSnapshotExportJobResource = new CloudBackupSnapshotExportJob("cloudBackupSnapshotExportJobResource", CloudBackupSnapshotExportJobArgs.builder()        
        .clusterName("string")
        .customDatas(CloudBackupSnapshotExportJobCustomDataArgs.builder()
            .key("string")
            .value("string")
            .build())
        .exportBucketId("string")
        .projectId("string")
        .snapshotId("string")
        .build());
    
    cloud_backup_snapshot_export_job_resource = mongodbatlas.CloudBackupSnapshotExportJob("cloudBackupSnapshotExportJobResource",
        cluster_name="string",
        custom_datas=[mongodbatlas.CloudBackupSnapshotExportJobCustomDataArgs(
            key="string",
            value="string",
        )],
        export_bucket_id="string",
        project_id="string",
        snapshot_id="string")
    
    const cloudBackupSnapshotExportJobResource = new mongodbatlas.CloudBackupSnapshotExportJob("cloudBackupSnapshotExportJobResource", {
        clusterName: "string",
        customDatas: [{
            key: "string",
            value: "string",
        }],
        exportBucketId: "string",
        projectId: "string",
        snapshotId: "string",
    });
    
    type: mongodbatlas:CloudBackupSnapshotExportJob
    properties:
        clusterName: string
        customDatas:
            - key: string
              value: string
        exportBucketId: string
        projectId: string
        snapshotId: string
    

    CloudBackupSnapshotExportJob Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    The CloudBackupSnapshotExportJob resource accepts the following input properties:

    ClusterName string
    Name of the Atlas cluster whose snapshot you want to export.
    CustomDatas List<CloudBackupSnapshotExportJobCustomData>
    Custom data to include in the metadata file named .complete that Atlas uploads to the bucket when the export job finishes. Custom data can be specified as key and value pairs.
    ExportBucketId string
    Unique identifier of the AWS bucket to export the Cloud Backup snapshot to. If necessary, use the Get All Snapshot Export Buckets API.
    ProjectId string
    Unique 24-hexadecimal digit string that identifies the project which contains the Atlas cluster whose snapshot you want to export.
    SnapshotId string
    Unique identifier of the Cloud Backup snapshot to export. If necessary, use the Get All Cloud Backups API.
    ClusterName string
    Name of the Atlas cluster whose snapshot you want to export.
    CustomDatas []CloudBackupSnapshotExportJobCustomDataArgs
    Custom data to include in the metadata file named .complete that Atlas uploads to the bucket when the export job finishes. Custom data can be specified as key and value pairs.
    ExportBucketId string
    Unique identifier of the AWS bucket to export the Cloud Backup snapshot to. If necessary, use the Get All Snapshot Export Buckets API.
    ProjectId string
    Unique 24-hexadecimal digit string that identifies the project which contains the Atlas cluster whose snapshot you want to export.
    SnapshotId string
    Unique identifier of the Cloud Backup snapshot to export. If necessary, use the Get All Cloud Backups API.
    clusterName String
    Name of the Atlas cluster whose snapshot you want to export.
    customDatas List<CloudBackupSnapshotExportJobCustomData>
    Custom data to include in the metadata file named .complete that Atlas uploads to the bucket when the export job finishes. Custom data can be specified as key and value pairs.
    exportBucketId String
    Unique identifier of the AWS bucket to export the Cloud Backup snapshot to. If necessary, use the Get All Snapshot Export Buckets API.
    projectId String
    Unique 24-hexadecimal digit string that identifies the project which contains the Atlas cluster whose snapshot you want to export.
    snapshotId String
    Unique identifier of the Cloud Backup snapshot to export. If necessary, use the Get All Cloud Backups API.
    clusterName string
    Name of the Atlas cluster whose snapshot you want to export.
    customDatas CloudBackupSnapshotExportJobCustomData[]
    Custom data to include in the metadata file named .complete that Atlas uploads to the bucket when the export job finishes. Custom data can be specified as key and value pairs.
    exportBucketId string
    Unique identifier of the AWS bucket to export the Cloud Backup snapshot to. If necessary, use the Get All Snapshot Export Buckets API.
    projectId string
    Unique 24-hexadecimal digit string that identifies the project which contains the Atlas cluster whose snapshot you want to export.
    snapshotId string
    Unique identifier of the Cloud Backup snapshot to export. If necessary, use the Get All Cloud Backups API.
    cluster_name str
    Name of the Atlas cluster whose snapshot you want to export.
    custom_datas Sequence[CloudBackupSnapshotExportJobCustomDataArgs]
    Custom data to include in the metadata file named .complete that Atlas uploads to the bucket when the export job finishes. Custom data can be specified as key and value pairs.
    export_bucket_id str
    Unique identifier of the AWS bucket to export the Cloud Backup snapshot to. If necessary, use the Get All Snapshot Export Buckets API.
    project_id str
    Unique 24-hexadecimal digit string that identifies the project which contains the Atlas cluster whose snapshot you want to export.
    snapshot_id str
    Unique identifier of the Cloud Backup snapshot to export. If necessary, use the Get All Cloud Backups API.
    clusterName String
    Name of the Atlas cluster whose snapshot you want to export.
    customDatas List<Property Map>
    Custom data to include in the metadata file named .complete that Atlas uploads to the bucket when the export job finishes. Custom data can be specified as key and value pairs.
    exportBucketId String
    Unique identifier of the AWS bucket to export the Cloud Backup snapshot to. If necessary, use the Get All Snapshot Export Buckets API.
    projectId String
    Unique 24-hexadecimal digit string that identifies the project which contains the Atlas cluster whose snapshot you want to export.
    snapshotId String
    Unique identifier of the Cloud Backup snapshot to export. If necessary, use the Get All Cloud Backups API.

    Outputs

    All input properties are implicitly available as output properties. Additionally, the CloudBackupSnapshotExportJob resource produces the following output properties:

    Components List<CloudBackupSnapshotExportJobComponent>
    Returned for sharded clusters only. Export job details for each replica set in the sharded cluster.
    CreatedAt string
    Timestamp in ISO 8601 date and time format in UTC when the export job was created.
    ErrMsg string
    Error message, only if the export job failed.
    ExportJobId string
    Unique identifier of the export job.
    ExportStatusExportedCollections int
    ExportStatusTotalCollections int
    FinishedAt string
    Timestamp in ISO 8601 date and time format in UTC when the export job completes.
    Id string
    The provider-assigned unique ID for this managed resource.
    Prefix string
    Full path on the cloud provider bucket to the folder where the snapshot is exported. The path is in the following format: /exported_snapshots/{ORG-NAME}/{PROJECT-NAME}/{CLUSTER-NAME}/{SNAPSHOT-INITIATION-DATE}/{TIMESTAMP}
    State string
    Status of the export job. Value can be one of the following:
    Components []CloudBackupSnapshotExportJobComponent
    Returned for sharded clusters only. Export job details for each replica set in the sharded cluster.
    CreatedAt string
    Timestamp in ISO 8601 date and time format in UTC when the export job was created.
    ErrMsg string
    Error message, only if the export job failed.
    ExportJobId string
    Unique identifier of the export job.
    ExportStatusExportedCollections int
    ExportStatusTotalCollections int
    FinishedAt string
    Timestamp in ISO 8601 date and time format in UTC when the export job completes.
    Id string
    The provider-assigned unique ID for this managed resource.
    Prefix string
    Full path on the cloud provider bucket to the folder where the snapshot is exported. The path is in the following format: /exported_snapshots/{ORG-NAME}/{PROJECT-NAME}/{CLUSTER-NAME}/{SNAPSHOT-INITIATION-DATE}/{TIMESTAMP}
    State string
    Status of the export job. Value can be one of the following:
    components List<CloudBackupSnapshotExportJobComponent>
    Returned for sharded clusters only. Export job details for each replica set in the sharded cluster.
    createdAt String
    Timestamp in ISO 8601 date and time format in UTC when the export job was created.
    errMsg String
    Error message, only if the export job failed.
    exportJobId String
    Unique identifier of the export job.
    exportStatusExportedCollections Integer
    exportStatusTotalCollections Integer
    finishedAt String
    Timestamp in ISO 8601 date and time format in UTC when the export job completes.
    id String
    The provider-assigned unique ID for this managed resource.
    prefix String
    Full path on the cloud provider bucket to the folder where the snapshot is exported. The path is in the following format: /exported_snapshots/{ORG-NAME}/{PROJECT-NAME}/{CLUSTER-NAME}/{SNAPSHOT-INITIATION-DATE}/{TIMESTAMP}
    state String
    Status of the export job. Value can be one of the following:
    components CloudBackupSnapshotExportJobComponent[]
    Returned for sharded clusters only. Export job details for each replica set in the sharded cluster.
    createdAt string
    Timestamp in ISO 8601 date and time format in UTC when the export job was created.
    errMsg string
    Error message, only if the export job failed.
    exportJobId string
    Unique identifier of the export job.
    exportStatusExportedCollections number
    exportStatusTotalCollections number
    finishedAt string
    Timestamp in ISO 8601 date and time format in UTC when the export job completes.
    id string
    The provider-assigned unique ID for this managed resource.
    prefix string
    Full path on the cloud provider bucket to the folder where the snapshot is exported. The path is in the following format: /exported_snapshots/{ORG-NAME}/{PROJECT-NAME}/{CLUSTER-NAME}/{SNAPSHOT-INITIATION-DATE}/{TIMESTAMP}
    state string
    Status of the export job. Value can be one of the following:
    components Sequence[CloudBackupSnapshotExportJobComponent]
    Returned for sharded clusters only. Export job details for each replica set in the sharded cluster.
    created_at str
    Timestamp in ISO 8601 date and time format in UTC when the export job was created.
    err_msg str
    Error message, only if the export job failed.
    export_job_id str
    Unique identifier of the export job.
    export_status_exported_collections int
    export_status_total_collections int
    finished_at str
    Timestamp in ISO 8601 date and time format in UTC when the export job completes.
    id str
    The provider-assigned unique ID for this managed resource.
    prefix str
    Full path on the cloud provider bucket to the folder where the snapshot is exported. The path is in the following format: /exported_snapshots/{ORG-NAME}/{PROJECT-NAME}/{CLUSTER-NAME}/{SNAPSHOT-INITIATION-DATE}/{TIMESTAMP}
    state str
    Status of the export job. Value can be one of the following:
    components List<Property Map>
    Returned for sharded clusters only. Export job details for each replica set in the sharded cluster.
    createdAt String
    Timestamp in ISO 8601 date and time format in UTC when the export job was created.
    errMsg String
    Error message, only if the export job failed.
    exportJobId String
    Unique identifier of the export job.
    exportStatusExportedCollections Number
    exportStatusTotalCollections Number
    finishedAt String
    Timestamp in ISO 8601 date and time format in UTC when the export job completes.
    id String
    The provider-assigned unique ID for this managed resource.
    prefix String
    Full path on the cloud provider bucket to the folder where the snapshot is exported. The path is in the following format: /exported_snapshots/{ORG-NAME}/{PROJECT-NAME}/{CLUSTER-NAME}/{SNAPSHOT-INITIATION-DATE}/{TIMESTAMP}
    state String
    Status of the export job. Value can be one of the following:
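
    As an illustration, a minimal TypeScript sketch (placeholder values, not part of the examples above) that surfaces a few of these output properties as stack outputs:

    import * as mongodbatlas from "@pulumi/mongodbatlas";
    
    // Placeholder export job; projectId, clusterName, snapshotId and exportBucketId must refer to real Atlas objects.
    const job = new mongodbatlas.CloudBackupSnapshotExportJob("exampleExportJob", {
        projectId: "{PROJECT_ID}",
        clusterName: "{CLUSTER_NAME}",
        snapshotId: "{SNAPSHOT_ID}",
        exportBucketId: "{EXPORT_BUCKET_ID}",
    });
    
    // Output properties are only known once the resource has been created.
    export const exportJobState = job.state;    // status of the export job
    export const exportJobPrefix = job.prefix;  // bucket path where the snapshot is exported
    export const exportJobId = job.exportJobId; // Atlas-assigned identifier of the export job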

    Look up Existing CloudBackupSnapshotExportJob Resource

    Get an existing CloudBackupSnapshotExportJob resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: CloudBackupSnapshotExportJobState, opts?: CustomResourceOptions): CloudBackupSnapshotExportJob
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            cluster_name: Optional[str] = None,
            components: Optional[Sequence[CloudBackupSnapshotExportJobComponentArgs]] = None,
            created_at: Optional[str] = None,
            custom_datas: Optional[Sequence[CloudBackupSnapshotExportJobCustomDataArgs]] = None,
            err_msg: Optional[str] = None,
            export_bucket_id: Optional[str] = None,
            export_job_id: Optional[str] = None,
            export_status_exported_collections: Optional[int] = None,
            export_status_total_collections: Optional[int] = None,
            finished_at: Optional[str] = None,
            prefix: Optional[str] = None,
            project_id: Optional[str] = None,
            snapshot_id: Optional[str] = None,
            state: Optional[str] = None) -> CloudBackupSnapshotExportJob
    func GetCloudBackupSnapshotExportJob(ctx *Context, name string, id IDInput, state *CloudBackupSnapshotExportJobState, opts ...ResourceOption) (*CloudBackupSnapshotExportJob, error)
    public static CloudBackupSnapshotExportJob Get(string name, Input<string> id, CloudBackupSnapshotExportJobState? state, CustomResourceOptions? opts = null)
    public static CloudBackupSnapshotExportJob get(String name, Output<String> id, CloudBackupSnapshotExportJobState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
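
    As a sketch, looking up an existing export job in TypeScript (the resource ID below is a placeholder for the provider-assigned ID):

    import * as mongodbatlas from "@pulumi/mongodbatlas";
    
    // Read the state of an export job that already exists outside this program.
    const existingJob = mongodbatlas.CloudBackupSnapshotExportJob.get(
        "existingExportJob",
        "{CLOUD_BACKUP_SNAPSHOT_EXPORT_JOB_ID}");
    
    // The looked-up resource exposes the same output properties as a newly created one.
    export const existingJobState = existingJob.state;
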
    The following state arguments are supported:
    ClusterName string
    Name of the Atlas cluster whose snapshot you want to export.
    Components List<CloudBackupSnapshotExportJobComponent>
    Returned for sharded clusters only. Export job details for each replica set in the sharded cluster.
    CreatedAt string
    Timestamp in ISO 8601 date and time format in UTC when the export job was created.
    CustomDatas List<CloudBackupSnapshotExportJobCustomData>
    Custom data to include in the metadata file named .complete that Atlas uploads to the bucket when the export job finishes. Custom data can be specified as key and value pairs.
    ErrMsg string
    Error message, only if the export job failed.
    ExportBucketId string
    Unique identifier of the AWS bucket to export the Cloud Backup snapshot to. If necessary, use the Get All Snapshot Export Buckets API.
    ExportJobId string
    Unique identifier of the export job.
    ExportStatusExportedCollections int
    ExportStatusTotalCollections int
    FinishedAt string
    Timestamp in ISO 8601 date and time format in UTC when the export job completes.
    Prefix string
    Full path on the cloud provider bucket to the folder where the snapshot is exported. The path is in the following format: /exported_snapshots/{ORG-NAME}/{PROJECT-NAME}/{CLUSTER-NAME}/{SNAPSHOT-INITIATION-DATE}/{TIMESTAMP}
    ProjectId string
    Unique 24-hexadecimal digit string that identifies the project which contains the Atlas cluster whose snapshot you want to export.
    SnapshotId string
    Unique identifier of the Cloud Backup snapshot to export. If necessary, use the Get All Cloud Backups API.
    State string
    Status of the export job. Value can be one of the following:
    ClusterName string
    Name of the Atlas cluster whose snapshot you want to export.
    Components []CloudBackupSnapshotExportJobComponentArgs
    Returned for sharded clusters only. Export job details for each replica set in the sharded cluster.
    CreatedAt string
    Timestamp in ISO 8601 date and time format in UTC when the export job was created.
    CustomDatas []CloudBackupSnapshotExportJobCustomDataArgs
    Custom data to include in the metadata file named .complete that Atlas uploads to the bucket when the export job finishes. Custom data can be specified as key and value pairs.
    ErrMsg string
    Error message, only if the export job failed.
    ExportBucketId string
    Unique identifier of the AWS bucket to export the Cloud Backup snapshot to. If necessary, use the Get All Snapshot Export Buckets API.
    ExportJobId string
    Unique identifier of the export job.
    ExportStatusExportedCollections int
    ExportStatusTotalCollections int
    FinishedAt string
    Timestamp in ISO 8601 date and time format in UTC when the export job completes.
    Prefix string
    Full path on the cloud provider bucket to the folder where the snapshot is exported. The path is in the following format: /exported_snapshots/{ORG-NAME}/{PROJECT-NAME}/{CLUSTER-NAME}/{SNAPSHOT-INITIATION-DATE}/{TIMESTAMP}
    ProjectId string
    Unique 24-hexadecimal digit string that identifies the project which contains the Atlas cluster whose snapshot you want to export.
    SnapshotId string
    Unique identifier of the Cloud Backup snapshot to export. If necessary, use the Get All Cloud Backups API.
    State string
    Status of the export job. Value can be one of the following:
    clusterName String
    Name of the Atlas cluster whose snapshot you want to export.
    components List<CloudBackupSnapshotExportJobComponent>
    Returned for sharded clusters only. Export job details for each replica set in the sharded cluster.
    createdAt String
    Timestamp in ISO 8601 date and time format in UTC when the export job was created.
    customDatas List<CloudBackupSnapshotExportJobCustomData>
    Custom data to include in the metadata file named .complete that Atlas uploads to the bucket when the export job finishes. Custom data can be specified as key and value pairs.
    errMsg String
    Error message, only if the export job failed.
    exportBucketId String
    Unique identifier of the AWS bucket to export the Cloud Backup snapshot to. If necessary, use the Get All Snapshot Export Buckets API.
    exportJobId String
    Unique identifier of the export job.
    exportStatusExportedCollections Integer
    exportStatusTotalCollections Integer
    finishedAt String
    Timestamp in ISO 8601 date and time format in UTC when the export job completes.
    prefix String
    Full path on the cloud provider bucket to the folder where the snapshot is exported. The path is in the following format: /exported_snapshots/{ORG-NAME}/{PROJECT-NAME}/{CLUSTER-NAME}/{SNAPSHOT-INITIATION-DATE}/{TIMESTAMP}
    projectId String
    Unique 24-hexadecimal digit string that identifies the project which contains the Atlas cluster whose snapshot you want to export.
    snapshotId String
    Unique identifier of the Cloud Backup snapshot to export. If necessary, use the Get All Cloud Backups API.
    state String
    Status of the export job. Value can be one of the following:
    clusterName string
    Name of the Atlas cluster whose snapshot you want to export.
    components CloudBackupSnapshotExportJobComponent[]
    Returned for sharded clusters only. Export job details for each replica set in the sharded cluster.
    createdAt string
    Timestamp in ISO 8601 date and time format in UTC when the export job was created.
    customDatas CloudBackupSnapshotExportJobCustomData[]
    Custom data to include in the metadata file named .complete that Atlas uploads to the bucket when the export job finishes. Custom data can be specified as key and value pairs.
    errMsg string
    Error message, only if the export job failed.
    exportBucketId string
    Unique identifier of the AWS bucket to export the Cloud Backup snapshot to. If necessary, use the Get All Snapshot Export Buckets API.
    exportJobId string
    Unique identifier of the export job.
    exportStatusExportedCollections number
    exportStatusTotalCollections number
    finishedAt string
    Timestamp in ISO 8601 date and time format in UTC when the export job completes.
    prefix string
    Full path on the cloud provider bucket to the folder where the snapshot is exported. The path is in the following format: /exported_snapshots/{ORG-NAME}/{PROJECT-NAME}/{CLUSTER-NAME}/{SNAPSHOT-INITIATION-DATE}/{TIMESTAMP}
    projectId string
    Unique 24-hexadecimal digit string that identifies the project which contains the Atlas cluster whose snapshot you want to export.
    snapshotId string
    Unique identifier of the Cloud Backup snapshot to export. If necessary, use the Get All Cloud Backups API.
    state string
    Status of the export job. Value can be one of the following:
    cluster_name str
    Name of the Atlas cluster whose snapshot you want to export.
    components Sequence[CloudBackupSnapshotExportJobComponentArgs]
    Returned for sharded clusters only. Export job details for each replica set in the sharded cluster.
    created_at str
    Timestamp in ISO 8601 date and time format in UTC when the export job was created.
    custom_datas Sequence[CloudBackupSnapshotExportJobCustomDataArgs]
    Custom data to include in the metadata file named .complete that Atlas uploads to the bucket when the export job finishes. Custom data can be specified as key and value pairs.
    err_msg str
    Error message, only if the export job failed.
    export_bucket_id str
    Unique identifier of the AWS bucket to export the Cloud Backup snapshot to. If necessary, use the Get All Snapshot Export Buckets API.
    export_job_id str
    Unique identifier of the export job.
    export_status_exported_collections int
    export_status_total_collections int
    finished_at str
    Timestamp in ISO 8601 date and time format in UTC when the export job completes.
    prefix str
    Full path on the cloud provider bucket to the folder where the snapshot is exported. The path is in the following format: /exported_snapshots/{ORG-NAME}/{PROJECT-NAME}/{CLUSTER-NAME}/{SNAPSHOT-INITIATION-DATE}/{TIMESTAMP}
    project_id str
    Unique 24-hexadecimal digit string that identifies the project which contains the Atlas cluster whose snapshot you want to export.
    snapshot_id str
    Unique identifier of the Cloud Backup snapshot to export. If necessary, use the Get All Cloud Backups API.
    state str
    Status of the export job. Value can be one of the following:
    clusterName String
    Name of the Atlas cluster whose snapshot you want to export.
    components List<Property Map>
    Returned for sharded clusters only. Export job details for each replica set in the sharded cluster.
    createdAt String
    Timestamp in ISO 8601 date and time format in UTC when the export job was created.
    customDatas List<Property Map>
    Custom data to include in the metadata file named .complete that Atlas uploads to the bucket when the export job finishes. Custom data can be specified as key and value pairs.
    errMsg String
    Error message, only if the export job failed.
    exportBucketId String
    Unique identifier of the AWS bucket to export the Cloud Backup snapshot to. If necessary, use the Get All Snapshot Export Buckets API.
    exportJobId String
    Unique identifier of the export job.
    exportStatusExportedCollections Number
    exportStatusTotalCollections Number
    finishedAt String
    Timestamp in ISO 8601 date and time format in UTC when the export job completes.
    prefix String
    Full path on the cloud provider bucket to the folder where the snapshot is exported. The path is in the following format: /exported_snapshots/{ORG-NAME}/{PROJECT-NAME}/{CLUSTER-NAME}/{SNAPSHOT-INITIATION-DATE}/{TIMESTAMP}
    projectId String
    Unique 24-hexadecimal digit string that identifies the project which contains the Atlas cluster whose snapshot you want to export.
    snapshotId String
    Unique identifier of the Cloud Backup snapshot to export. If necessary, use the Get All Cloud Backups API.
    state String
    Status of the export job. Value can be one of the following:

    Supporting Types

    CloudBackupSnapshotExportJobComponent, CloudBackupSnapshotExportJobComponentArgs

    ExportId string
    Returned for sharded clusters only. Unique identifier of the export job for the replica set.
    ReplicaSetName string
    Returned for sharded clusters only. Name of the replica set.
    ExportId string
    Returned for sharded clusters only. Unique identifier of the export job for the replica set.
    ReplicaSetName string
    Returned for sharded clusters only. Name of the replica set.
    exportId String
    Returned for sharded clusters only. Unique identifier of the export job for the replica set.
    replicaSetName String
    Returned for sharded clusters only. Name of the replica set.
    exportId string
    Returned for sharded clusters only. Unique identifier of the export job for the replica set.
    replicaSetName string
    Returned for sharded clusters only. Name of the replica set.
    export_id str
    Returned for sharded clusters only. Unique identifier of the export job for the replica set.
    replica_set_name str
    Returned for sharded clusters only. Name of the replica set.
    exportId String
    Returned for sharded clusters only. Unique identifier of the export job for the replica set.
    replicaSetName String
    Returned for sharded clusters only. Name of the replica set.
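
    A minimal TypeScript sketch (placeholder values) that reads these per-replica-set details from an export job on a sharded cluster:

    import * as mongodbatlas from "@pulumi/mongodbatlas";
    
    // Placeholder export job for a sharded cluster.
    const shardedJob = new mongodbatlas.CloudBackupSnapshotExportJob("shardedExportJob", {
        projectId: "{PROJECT_ID}",
        clusterName: "{SHARDED_CLUSTER_NAME}",
        snapshotId: "{SNAPSHOT_ID}",
        exportBucketId: "{EXPORT_BUCKET_ID}",
    });
    
    // `components` is populated for sharded clusters only, with one entry per replica set.
    export const replicaSetExportIds = shardedJob.components.apply(components =>
        components.map(c => ({ replicaSetName: c.replicaSetName, exportId: c.exportId })));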

    CloudBackupSnapshotExportJobCustomData, CloudBackupSnapshotExportJobCustomDataArgs

    Key string
    Required if you want to include custom data using custom_data in the metadata file uploaded to the bucket. Key to include in the metadata file that Atlas uploads to the bucket when the export job finishes.
    Value string
    Required if you specify key.
    Key string
    Required if you want to include custom data using custom_data in the metadata file uploaded to the bucket. Key to include in the metadata file that Atlas uploads to the bucket when the export job finishes.
    Value string
    Required if you specify key.
    key String
    Required if you want to include custom data using custom_data in the metadata file uploaded to the bucket. Key to include in the metadata file that Atlas uploads to the bucket when the export job finishes.
    value String
    Required if you specify key.
    key string
    Required if you want to include custom data using custom_data in the metadata file uploaded to the bucket. Key to include in the metadata file that Atlas uploads to the bucket when the export job finishes.
    value string
    Required if you specify key.
    key str
    Required if you want to include custom data using custom_data in the metadata file uploaded to the bucket. Key to include in the metadata file that Atlas uploads to the bucket when the export job finishes.
    value str
    Required if you specify key.
    key String
    Required if you want to include custom data using custom_data in the metadata file uploaded to the bucket. Key to include in the metadata file that Atlas uploads to the bucket when the export job finishes.
    value String
    Required if you specify key.

    Import

    Cloud Backup Snapshot Export Job entries can be imported using the project_id, cluster_name and export_job_id (unique identifier of the snapshot export job), in the format PROJECTID-CLUSTERNAME-EXPORTJOBID, e.g.

    $ pulumi import mongodbatlas:index/cloudBackupSnapshotExportJob:CloudBackupSnapshotExportJob test 5d0f1f73cf09a29120e173cf-5d116d82014b764445b2f9b5-5d116d82014b764445b2f9b5
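
    Once imported, the job still needs a matching declaration in the program so that subsequent pulumi up runs can manage it. A minimal TypeScript sketch, using placeholder values that would have to mirror the imported job:

    import * as mongodbatlas from "@pulumi/mongodbatlas";
    
    // Declaration corresponding to the imported resource named "test" above;
    // the property values must match the existing export job in Atlas.
    const test = new mongodbatlas.CloudBackupSnapshotExportJob("test", {
        projectId: "{PROJECT_ID}",
        clusterName: "{CLUSTER_NAME}",
        snapshotId: "{SNAPSHOT_ID}",
        exportBucketId: "{EXPORT_BUCKET_ID}",
    });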
    

    For more information see: MongoDB Atlas API Reference.

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    MongoDB Atlas pulumi/pulumi-mongodbatlas
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the mongodbatlas Terraform Provider.