
Try AWS Native preview for resources not in the classic version.

AWS Classic v6.32.0 published on Friday, Apr 19, 2024 by Pulumi

aws.appflow.Flow


    Provides an AppFlow flow resource.

    Example Usage

    TypeScript:

    import * as pulumi from "@pulumi/pulumi";
    import * as aws from "@pulumi/aws";
    
    const exampleSourceBucketV2 = new aws.s3.BucketV2("example_source", {bucket: "example-source"});
    const exampleSource = aws.iam.getPolicyDocument({
        statements: [{
            sid: "AllowAppFlowSourceActions",
            effect: "Allow",
            principals: [{
                type: "Service",
                identifiers: ["appflow.amazonaws.com"],
            }],
            actions: [
                "s3:ListBucket",
                "s3:GetObject",
            ],
            resources: [
                "arn:aws:s3:::example-source",
                "arn:aws:s3:::example-source/*",
            ],
        }],
    });
    const exampleSourceBucketPolicy = new aws.s3.BucketPolicy("example_source", {
        bucket: exampleSourceBucketV2.id,
        policy: exampleSource.then(exampleSource => exampleSource.json),
    });
    const example = new aws.s3.BucketObjectv2("example", {
        bucket: exampleSourceBucketV2.id,
        key: "example_source.csv",
        source: new pulumi.asset.FileAsset("example_source.csv"),
    });
    const exampleDestinationBucketV2 = new aws.s3.BucketV2("example_destination", {bucket: "example-destination"});
    const exampleDestination = aws.iam.getPolicyDocument({
        statements: [{
            sid: "AllowAppFlowDestinationActions",
            effect: "Allow",
            principals: [{
                type: "Service",
                identifiers: ["appflow.amazonaws.com"],
            }],
            actions: [
                "s3:PutObject",
                "s3:AbortMultipartUpload",
                "s3:ListMultipartUploadParts",
                "s3:ListBucketMultipartUploads",
                "s3:GetBucketAcl",
                "s3:PutObjectAcl",
            ],
            resources: [
                "arn:aws:s3:::example-destination",
                "arn:aws:s3:::example-destination/*",
            ],
        }],
    });
    const exampleDestinationBucketPolicy = new aws.s3.BucketPolicy("example_destination", {
        bucket: exampleDestinationBucketV2.id,
        policy: exampleDestination.then(exampleDestination => exampleDestination.json),
    });
    const exampleFlow = new aws.appflow.Flow("example", {
        name: "example",
        sourceFlowConfig: {
            connectorType: "S3",
            sourceConnectorProperties: {
                s3: {
                    bucketName: exampleSourceBucketPolicy.bucket,
                    bucketPrefix: "example",
                },
            },
        },
        destinationFlowConfigs: [{
            connectorType: "S3",
            destinationConnectorProperties: {
                s3: {
                    bucketName: exampleDestinationBucketPolicy.bucket,
                    s3OutputFormatConfig: {
                        prefixConfig: {
                            prefixType: "PATH",
                        },
                    },
                },
            },
        }],
        tasks: [{
            sourceFields: ["exampleField"],
            destinationField: "exampleField",
            taskType: "Map",
            connectorOperators: [{
                s3: "NO_OP",
            }],
        }],
        triggerConfig: {
            triggerType: "OnDemand",
        },
    });
    
    Python:

    import pulumi
    import pulumi_aws as aws
    
    example_source_bucket_v2 = aws.s3.BucketV2("example_source", bucket="example-source")
    example_source = aws.iam.get_policy_document(statements=[aws.iam.GetPolicyDocumentStatementArgs(
        sid="AllowAppFlowSourceActions",
        effect="Allow",
        principals=[aws.iam.GetPolicyDocumentStatementPrincipalArgs(
            type="Service",
            identifiers=["appflow.amazonaws.com"],
        )],
        actions=[
            "s3:ListBucket",
            "s3:GetObject",
        ],
        resources=[
            "arn:aws:s3:::example-source",
            "arn:aws:s3:::example-source/*",
        ],
    )])
    example_source_bucket_policy = aws.s3.BucketPolicy("example_source",
        bucket=example_source_bucket_v2.id,
        policy=example_source.json)
    example = aws.s3.BucketObjectv2("example",
        bucket=example_source_bucket_v2.id,
        key="example_source.csv",
        source=pulumi.FileAsset("example_source.csv"))
    example_destination_bucket_v2 = aws.s3.BucketV2("example_destination", bucket="example-destination")
    example_destination = aws.iam.get_policy_document(statements=[aws.iam.GetPolicyDocumentStatementArgs(
        sid="AllowAppFlowDestinationActions",
        effect="Allow",
        principals=[aws.iam.GetPolicyDocumentStatementPrincipalArgs(
            type="Service",
            identifiers=["appflow.amazonaws.com"],
        )],
        actions=[
            "s3:PutObject",
            "s3:AbortMultipartUpload",
            "s3:ListMultipartUploadParts",
            "s3:ListBucketMultipartUploads",
            "s3:GetBucketAcl",
            "s3:PutObjectAcl",
        ],
        resources=[
            "arn:aws:s3:::example-destination",
            "arn:aws:s3:::example-destination/*",
        ],
    )])
    example_destination_bucket_policy = aws.s3.BucketPolicy("example_destination",
        bucket=example_destination_bucket_v2.id,
        policy=example_destination.json)
    example_flow = aws.appflow.Flow("example",
        name="example",
        source_flow_config=aws.appflow.FlowSourceFlowConfigArgs(
            connector_type="S3",
            source_connector_properties=aws.appflow.FlowSourceFlowConfigSourceConnectorPropertiesArgs(
                s3=aws.appflow.FlowSourceFlowConfigSourceConnectorPropertiesS3Args(
                    bucket_name=example_source_bucket_policy.bucket,
                    bucket_prefix="example",
                ),
            ),
        ),
        destination_flow_configs=[aws.appflow.FlowDestinationFlowConfigArgs(
            connector_type="S3",
            destination_connector_properties=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesArgs(
                s3=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args(
                    bucket_name=example_destination_bucket_policy.bucket,
                    s3_output_format_config=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs(
                        prefix_config=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs(
                            prefix_type="PATH",
                        ),
                    ),
                ),
            ),
        )],
        tasks=[aws.appflow.FlowTaskArgs(
            source_fields=["exampleField"],
            destination_field="exampleField",
            task_type="Map",
            connector_operators=[aws.appflow.FlowTaskConnectorOperatorArgs(
                s3="NO_OP",
            )],
        )],
        trigger_config=aws.appflow.FlowTriggerConfigArgs(
            trigger_type="OnDemand",
        ))
    
    Go:

    package main
    
    import (
    	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/appflow"
    	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/iam"
    	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/s3"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		exampleSourceBucketV2, err := s3.NewBucketV2(ctx, "example_source", &s3.BucketV2Args{
    			Bucket: pulumi.String("example-source"),
    		})
    		if err != nil {
    			return err
    		}
    		exampleSource, err := iam.GetPolicyDocument(ctx, &iam.GetPolicyDocumentArgs{
    			Statements: []iam.GetPolicyDocumentStatement{
    				{
    					Sid:    pulumi.StringRef("AllowAppFlowSourceActions"),
    					Effect: pulumi.StringRef("Allow"),
    					Principals: []iam.GetPolicyDocumentStatementPrincipal{
    						{
    							Type: "Service",
    							Identifiers: []string{
    								"appflow.amazonaws.com",
    							},
    						},
    					},
    					Actions: []string{
    						"s3:ListBucket",
    						"s3:GetObject",
    					},
    					Resources: []string{
    						"arn:aws:s3:::example-source",
    						"arn:aws:s3:::example-source/*",
    					},
    				},
    			},
    		}, nil)
    		if err != nil {
    			return err
    		}
    		exampleSourceBucketPolicy, err := s3.NewBucketPolicy(ctx, "example_source", &s3.BucketPolicyArgs{
    			Bucket: exampleSourceBucketV2.ID(),
    			Policy: pulumi.String(exampleSource.Json),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = s3.NewBucketObjectv2(ctx, "example", &s3.BucketObjectv2Args{
    			Bucket: exampleSourceBucketV2.ID(),
    			Key:    pulumi.String("example_source.csv"),
    			Source: pulumi.NewFileAsset("example_source.csv"),
    		})
    		if err != nil {
    			return err
    		}
    		exampleDestinationBucketV2, err := s3.NewBucketV2(ctx, "example_destination", &s3.BucketV2Args{
    			Bucket: pulumi.String("example-destination"),
    		})
    		if err != nil {
    			return err
    		}
    		exampleDestination, err := iam.GetPolicyDocument(ctx, &iam.GetPolicyDocumentArgs{
    			Statements: []iam.GetPolicyDocumentStatement{
    				{
    					Sid:    pulumi.StringRef("AllowAppFlowDestinationActions"),
    					Effect: pulumi.StringRef("Allow"),
    					Principals: []iam.GetPolicyDocumentStatementPrincipal{
    						{
    							Type: "Service",
    							Identifiers: []string{
    								"appflow.amazonaws.com",
    							},
    						},
    					},
    					Actions: []string{
    						"s3:PutObject",
    						"s3:AbortMultipartUpload",
    						"s3:ListMultipartUploadParts",
    						"s3:ListBucketMultipartUploads",
    						"s3:GetBucketAcl",
    						"s3:PutObjectAcl",
    					},
    					Resources: []string{
    						"arn:aws:s3:::example-destination",
    						"arn:aws:s3:::example-destination/*",
    					},
    				},
    			},
    		}, nil)
    		if err != nil {
    			return err
    		}
    		exampleDestinationBucketPolicy, err := s3.NewBucketPolicy(ctx, "example_destination", &s3.BucketPolicyArgs{
    			Bucket: exampleDestinationBucketV2.ID(),
    			Policy: pulumi.String(exampleDestination.Json),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = appflow.NewFlow(ctx, "example", &appflow.FlowArgs{
    			Name: pulumi.String("example"),
    			SourceFlowConfig: &appflow.FlowSourceFlowConfigArgs{
    				ConnectorType: pulumi.String("S3"),
    				SourceConnectorProperties: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesArgs{
    					S3: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesS3Args{
    						BucketName:   exampleSourceBucketPolicy.Bucket,
    						BucketPrefix: pulumi.String("example"),
    					},
    				},
    			},
    			DestinationFlowConfigs: appflow.FlowDestinationFlowConfigArray{
    				&appflow.FlowDestinationFlowConfigArgs{
    					ConnectorType: pulumi.String("S3"),
    					DestinationConnectorProperties: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesArgs{
    						S3: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args{
    							BucketName: exampleDestinationBucketPolicy.Bucket,
    							S3OutputFormatConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs{
    								PrefixConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs{
    									PrefixType: pulumi.String("PATH"),
    								},
    							},
    						},
    					},
    				},
    			},
    			Tasks: appflow.FlowTaskArray{
    				&appflow.FlowTaskArgs{
    					SourceFields: pulumi.StringArray{
    						pulumi.String("exampleField"),
    					},
    					DestinationField: pulumi.String("exampleField"),
    					TaskType:         pulumi.String("Map"),
    					ConnectorOperators: appflow.FlowTaskConnectorOperatorArray{
    						&appflow.FlowTaskConnectorOperatorArgs{
    							S3: pulumi.String("NO_OP"),
    						},
    					},
    				},
    			},
    			TriggerConfig: &appflow.FlowTriggerConfigArgs{
    				TriggerType: pulumi.String("OnDemand"),
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    C#:

    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Aws = Pulumi.Aws;
    
    return await Deployment.RunAsync(() => 
    {
        var exampleSourceBucketV2 = new Aws.S3.BucketV2("example_source", new()
        {
            Bucket = "example-source",
        });
    
        var exampleSource = Aws.Iam.GetPolicyDocument.Invoke(new()
        {
            Statements = new[]
            {
                new Aws.Iam.Inputs.GetPolicyDocumentStatementInputArgs
                {
                    Sid = "AllowAppFlowSourceActions",
                    Effect = "Allow",
                    Principals = new[]
                    {
                        new Aws.Iam.Inputs.GetPolicyDocumentStatementPrincipalInputArgs
                        {
                            Type = "Service",
                            Identifiers = new[]
                            {
                                "appflow.amazonaws.com",
                            },
                        },
                    },
                    Actions = new[]
                    {
                        "s3:ListBucket",
                        "s3:GetObject",
                    },
                    Resources = new[]
                    {
                        "arn:aws:s3:::example-source",
                        "arn:aws:s3:::example-source/*",
                    },
                },
            },
        });
    
        var exampleSourceBucketPolicy = new Aws.S3.BucketPolicy("example_source", new()
        {
            Bucket = exampleSourceBucketV2.Id,
            Policy = exampleSource.Apply(getPolicyDocumentResult => getPolicyDocumentResult.Json),
        });
    
        var example = new Aws.S3.BucketObjectv2("example", new()
        {
            Bucket = exampleSourceBucketV2.Id,
            Key = "example_source.csv",
            Source = new FileAsset("example_source.csv"),
        });
    
        var exampleDestinationBucketV2 = new Aws.S3.BucketV2("example_destination", new()
        {
            Bucket = "example-destination",
        });
    
        var exampleDestination = Aws.Iam.GetPolicyDocument.Invoke(new()
        {
            Statements = new[]
            {
                new Aws.Iam.Inputs.GetPolicyDocumentStatementInputArgs
                {
                    Sid = "AllowAppFlowDestinationActions",
                    Effect = "Allow",
                    Principals = new[]
                    {
                        new Aws.Iam.Inputs.GetPolicyDocumentStatementPrincipalInputArgs
                        {
                            Type = "Service",
                            Identifiers = new[]
                            {
                                "appflow.amazonaws.com",
                            },
                        },
                    },
                    Actions = new[]
                    {
                        "s3:PutObject",
                        "s3:AbortMultipartUpload",
                        "s3:ListMultipartUploadParts",
                        "s3:ListBucketMultipartUploads",
                        "s3:GetBucketAcl",
                        "s3:PutObjectAcl",
                    },
                    Resources = new[]
                    {
                        "arn:aws:s3:::example-destination",
                        "arn:aws:s3:::example-destination/*",
                    },
                },
            },
        });
    
        var exampleDestinationBucketPolicy = new Aws.S3.BucketPolicy("example_destination", new()
        {
            Bucket = exampleDestinationBucketV2.Id,
            Policy = exampleDestination.Apply(getPolicyDocumentResult => getPolicyDocumentResult.Json),
        });
    
        var exampleFlow = new Aws.AppFlow.Flow("example", new()
        {
            Name = "example",
            SourceFlowConfig = new Aws.AppFlow.Inputs.FlowSourceFlowConfigArgs
            {
                ConnectorType = "S3",
                SourceConnectorProperties = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesArgs
                {
                    S3 = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesS3Args
                    {
                        BucketName = exampleSourceBucketPolicy.Bucket,
                        BucketPrefix = "example",
                    },
                },
            },
            DestinationFlowConfigs = new[]
            {
                new Aws.AppFlow.Inputs.FlowDestinationFlowConfigArgs
                {
                    ConnectorType = "S3",
                    DestinationConnectorProperties = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesArgs
                    {
                        S3 = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args
                        {
                            BucketName = exampleDestinationBucketPolicy.Bucket,
                            S3OutputFormatConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs
                            {
                                PrefixConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs
                                {
                                    PrefixType = "PATH",
                                },
                            },
                        },
                    },
                },
            },
            Tasks = new[]
            {
                new Aws.AppFlow.Inputs.FlowTaskArgs
                {
                    SourceFields = new[]
                    {
                        "exampleField",
                    },
                    DestinationField = "exampleField",
                    TaskType = "Map",
                    ConnectorOperators = new[]
                    {
                        new Aws.AppFlow.Inputs.FlowTaskConnectorOperatorArgs
                        {
                            S3 = "NO_OP",
                        },
                    },
                },
            },
            TriggerConfig = new Aws.AppFlow.Inputs.FlowTriggerConfigArgs
            {
                TriggerType = "OnDemand",
            },
        });
    
    });
    
    Java:

    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.aws.s3.BucketV2;
    import com.pulumi.aws.s3.BucketV2Args;
    import com.pulumi.aws.iam.IamFunctions;
    import com.pulumi.aws.iam.inputs.GetPolicyDocumentArgs;
    import com.pulumi.aws.iam.inputs.GetPolicyDocumentStatementArgs;
    import com.pulumi.aws.iam.inputs.GetPolicyDocumentStatementPrincipalArgs;
    import com.pulumi.aws.s3.BucketPolicy;
    import com.pulumi.aws.s3.BucketPolicyArgs;
    import com.pulumi.aws.s3.BucketObjectv2;
    import com.pulumi.aws.s3.BucketObjectv2Args;
    import com.pulumi.aws.appflow.Flow;
    import com.pulumi.aws.appflow.FlowArgs;
    import com.pulumi.aws.appflow.inputs.FlowSourceFlowConfigArgs;
    import com.pulumi.aws.appflow.inputs.FlowSourceFlowConfigSourceConnectorPropertiesArgs;
    import com.pulumi.aws.appflow.inputs.FlowSourceFlowConfigSourceConnectorPropertiesS3Args;
    import com.pulumi.aws.appflow.inputs.FlowDestinationFlowConfigArgs;
    import com.pulumi.aws.appflow.inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesArgs;
    import com.pulumi.aws.appflow.inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args;
    import com.pulumi.aws.appflow.inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs;
    import com.pulumi.aws.appflow.inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs;
    import com.pulumi.aws.appflow.inputs.FlowTaskArgs;
    import com.pulumi.aws.appflow.inputs.FlowTaskConnectorOperatorArgs;
    import com.pulumi.aws.appflow.inputs.FlowTriggerConfigArgs;
    import com.pulumi.asset.FileAsset;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var exampleSourceBucketV2 = new BucketV2("exampleSourceBucketV2", BucketV2Args.builder()        
                .bucket("example-source")
                .build());
    
            final var exampleSource = IamFunctions.getPolicyDocument(GetPolicyDocumentArgs.builder()
                .statements(GetPolicyDocumentStatementArgs.builder()
                    .sid("AllowAppFlowSourceActions")
                    .effect("Allow")
                    .principals(GetPolicyDocumentStatementPrincipalArgs.builder()
                        .type("Service")
                        .identifiers("appflow.amazonaws.com")
                        .build())
                    .actions(                
                        "s3:ListBucket",
                        "s3:GetObject")
                    .resources(                
                        "arn:aws:s3:::example-source",
                        "arn:aws:s3:::example-source/*")
                    .build())
                .build());
    
            var exampleSourceBucketPolicy = new BucketPolicy("exampleSourceBucketPolicy", BucketPolicyArgs.builder()        
                .bucket(exampleSourceBucketV2.id())
                .policy(exampleSource.applyValue(getPolicyDocumentResult -> getPolicyDocumentResult.json()))
                .build());
    
            var example = new BucketObjectv2("example", BucketObjectv2Args.builder()        
                .bucket(exampleSourceBucketV2.id())
                .key("example_source.csv")
                .source(new FileAsset("example_source.csv"))
                .build());
    
            var exampleDestinationBucketV2 = new BucketV2("exampleDestinationBucketV2", BucketV2Args.builder()        
                .bucket("example-destination")
                .build());
    
            final var exampleDestination = IamFunctions.getPolicyDocument(GetPolicyDocumentArgs.builder()
                .statements(GetPolicyDocumentStatementArgs.builder()
                    .sid("AllowAppFlowDestinationActions")
                    .effect("Allow")
                    .principals(GetPolicyDocumentStatementPrincipalArgs.builder()
                        .type("Service")
                        .identifiers("appflow.amazonaws.com")
                        .build())
                    .actions(                
                        "s3:PutObject",
                        "s3:AbortMultipartUpload",
                        "s3:ListMultipartUploadParts",
                        "s3:ListBucketMultipartUploads",
                        "s3:GetBucketAcl",
                        "s3:PutObjectAcl")
                    .resources(                
                        "arn:aws:s3:::example-destination",
                        "arn:aws:s3:::example-destination/*")
                    .build())
                .build());
    
            var exampleDestinationBucketPolicy = new BucketPolicy("exampleDestinationBucketPolicy", BucketPolicyArgs.builder()        
                .bucket(exampleDestinationBucketV2.id())
                .policy(exampleDestination.applyValue(getPolicyDocumentResult -> getPolicyDocumentResult.json()))
                .build());
    
            var exampleFlow = new Flow("exampleFlow", FlowArgs.builder()        
                .name("example")
                .sourceFlowConfig(FlowSourceFlowConfigArgs.builder()
                    .connectorType("S3")
                    .sourceConnectorProperties(FlowSourceFlowConfigSourceConnectorPropertiesArgs.builder()
                        .s3(FlowSourceFlowConfigSourceConnectorPropertiesS3Args.builder()
                            .bucketName(exampleSourceBucketPolicy.bucket())
                            .bucketPrefix("example")
                            .build())
                        .build())
                    .build())
                .destinationFlowConfigs(FlowDestinationFlowConfigArgs.builder()
                    .connectorType("S3")
                    .destinationConnectorProperties(FlowDestinationFlowConfigDestinationConnectorPropertiesArgs.builder()
                        .s3(FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args.builder()
                            .bucketName(exampleDestinationBucketPolicy.bucket())
                            .s3OutputFormatConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs.builder()
                                .prefixConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs.builder()
                                    .prefixType("PATH")
                                    .build())
                                .build())
                            .build())
                        .build())
                    .build())
                .tasks(FlowTaskArgs.builder()
                    .sourceFields("exampleField")
                    .destinationField("exampleField")
                    .taskType("Map")
                    .connectorOperators(FlowTaskConnectorOperatorArgs.builder()
                        .s3("NO_OP")
                        .build())
                    .build())
                .triggerConfig(FlowTriggerConfigArgs.builder()
                    .triggerType("OnDemand")
                    .build())
                .build());
    
        }
    }
    
    YAML:

    resources:
      exampleSourceBucketV2:
        type: aws:s3:BucketV2
        name: example_source
        properties:
          bucket: example-source
      exampleSourceBucketPolicy:
        type: aws:s3:BucketPolicy
        name: example_source
        properties:
          bucket: ${exampleSourceBucketV2.id}
          policy: ${exampleSource.json}
      example:
        type: aws:s3:BucketObjectv2
        properties:
          bucket: ${exampleSourceBucketV2.id}
          key: example_source.csv
          source:
            fn::FileAsset: example_source.csv
      exampleDestinationBucketV2:
        type: aws:s3:BucketV2
        name: example_destination
        properties:
          bucket: example-destination
      exampleDestinationBucketPolicy:
        type: aws:s3:BucketPolicy
        name: example_destination
        properties:
          bucket: ${exampleDestinationBucketV2.id}
          policy: ${exampleDestination.json}
      exampleFlow:
        type: aws:appflow:Flow
        name: example
        properties:
          name: example
          sourceFlowConfig:
            connectorType: S3
            sourceConnectorProperties:
              s3:
                bucketName: ${exampleSourceBucketPolicy.bucket}
                bucketPrefix: example
          destinationFlowConfigs:
            - connectorType: S3
              destinationConnectorProperties:
                s3:
                  bucketName: ${exampleDestinationBucketPolicy.bucket}
                  s3OutputFormatConfig:
                    prefixConfig:
                      prefixType: PATH
          tasks:
            - sourceFields:
                - exampleField
              destinationField: exampleField
              taskType: Map
              connectorOperators:
                - s3: NO_OP
          triggerConfig:
            triggerType: OnDemand
    variables:
      exampleSource:
        fn::invoke:
          Function: aws:iam:getPolicyDocument
          Arguments:
            statements:
              - sid: AllowAppFlowSourceActions
                effect: Allow
                principals:
                  - type: Service
                    identifiers:
                      - appflow.amazonaws.com
                actions:
                  - s3:ListBucket
                  - s3:GetObject
                resources:
                  - arn:aws:s3:::example-source
                  - arn:aws:s3:::example-source/*
      exampleDestination:
        fn::invoke:
          Function: aws:iam:getPolicyDocument
          Arguments:
            statements:
              - sid: AllowAppFlowDestinationActions
                effect: Allow
                principals:
                  - type: Service
                    identifiers:
                      - appflow.amazonaws.com
                actions:
                  - s3:PutObject
                  - s3:AbortMultipartUpload
                  - s3:ListMultipartUploadParts
                  - s3:ListBucketMultipartUploads
                  - s3:GetBucketAcl
                  - s3:PutObjectAcl
                resources:
                  - arn:aws:s3:::example-destination
                  - arn:aws:s3:::example-destination/*
    

    Create Flow Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new Flow(name: string, args: FlowArgs, opts?: CustomResourceOptions);
    @overload
    def Flow(resource_name: str,
             args: FlowArgs,
             opts: Optional[ResourceOptions] = None)
    
    @overload
    def Flow(resource_name: str,
             opts: Optional[ResourceOptions] = None,
             destination_flow_configs: Optional[Sequence[FlowDestinationFlowConfigArgs]] = None,
             source_flow_config: Optional[FlowSourceFlowConfigArgs] = None,
             tasks: Optional[Sequence[FlowTaskArgs]] = None,
             trigger_config: Optional[FlowTriggerConfigArgs] = None,
             description: Optional[str] = None,
             kms_arn: Optional[str] = None,
             name: Optional[str] = None,
             tags: Optional[Mapping[str, str]] = None)
    func NewFlow(ctx *Context, name string, args FlowArgs, opts ...ResourceOption) (*Flow, error)
    public Flow(string name, FlowArgs args, CustomResourceOptions? opts = null)
    public Flow(String name, FlowArgs args)
    public Flow(String name, FlowArgs args, CustomResourceOptions options)
    
    type: aws:appflow:Flow
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    TypeScript (JavaScript)

    name string
    The unique name of the resource.
    args FlowArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.

    Python

    resource_name str
    The unique name of the resource.
    args FlowArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.

    Go

    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args FlowArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.

    C#

    name string
    The unique name of the resource.
    args FlowArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.

    Java

    name String
    The unique name of the resource.
    args FlowArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.
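
    For instance, a minimal TypeScript sketch of passing all three parameters together; the bucket names and the `protect` option are illustrative placeholders rather than part of the example above:

    import * as aws from "@pulumi/aws";

    // Logical name, the FlowArgs bag, and an options bag in a single call.
    const flow = new aws.appflow.Flow("example", {
        sourceFlowConfig: {
            connectorType: "S3",
            sourceConnectorProperties: {
                s3: { bucketName: "example-source", bucketPrefix: "example" },
            },
        },
        destinationFlowConfigs: [{
            connectorType: "S3",
            destinationConnectorProperties: {
                s3: {
                    bucketName: "example-destination",
                    s3OutputFormatConfig: { prefixConfig: { prefixType: "PATH" } },
                },
            },
        }],
        tasks: [{
            sourceFields: ["exampleField"],
            destinationField: "exampleField",
            taskType: "Map",
            connectorOperators: [{ s3: "NO_OP" }],
        }],
        triggerConfig: { triggerType: "OnDemand" },
    }, {
        protect: true, // opts: guard the flow against accidental deletion
    });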

    Example

    The following reference example uses placeholder values for all input properties.

    C#:

    var flowResource = new Aws.AppFlow.Flow("flowResource", new()
    {
        DestinationFlowConfigs = new[]
        {
            new Aws.AppFlow.Inputs.FlowDestinationFlowConfigArgs
            {
                ConnectorType = "string",
                DestinationConnectorProperties = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesArgs
                {
                    CustomConnector = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorArgs
                    {
                        EntityName = "string",
                        CustomProperties = 
                        {
                            { "string", "string" },
                        },
                        ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorErrorHandlingConfigArgs
                        {
                            BucketName = "string",
                            BucketPrefix = "string",
                            FailOnFirstDestinationError = false,
                        },
                        IdFieldNames = new[]
                        {
                            "string",
                        },
                        WriteOperationType = "string",
                    },
                    CustomerProfiles = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfilesArgs
                    {
                        DomainName = "string",
                        ObjectTypeName = "string",
                    },
                    EventBridge = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeArgs
                    {
                        Object = "string",
                        ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeErrorHandlingConfigArgs
                        {
                            BucketName = "string",
                            BucketPrefix = "string",
                            FailOnFirstDestinationError = false,
                        },
                    },
                    Honeycode = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeArgs
                    {
                        Object = "string",
                        ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeErrorHandlingConfigArgs
                        {
                            BucketName = "string",
                            BucketPrefix = "string",
                            FailOnFirstDestinationError = false,
                        },
                    },
                    LookoutMetrics = null,
                    Marketo = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoArgs
                    {
                        Object = "string",
                        ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoErrorHandlingConfigArgs
                        {
                            BucketName = "string",
                            BucketPrefix = "string",
                            FailOnFirstDestinationError = false,
                        },
                    },
                    Redshift = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftArgs
                    {
                        IntermediateBucketName = "string",
                        Object = "string",
                        BucketPrefix = "string",
                        ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftErrorHandlingConfigArgs
                        {
                            BucketName = "string",
                            BucketPrefix = "string",
                            FailOnFirstDestinationError = false,
                        },
                    },
                    S3 = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args
                    {
                        BucketName = "string",
                        BucketPrefix = "string",
                        S3OutputFormatConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs
                        {
                            AggregationConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfigArgs
                            {
                                AggregationType = "string",
                                TargetFileSize = 0,
                            },
                            FileType = "string",
                            PrefixConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs
                            {
                                PrefixFormat = "string",
                                PrefixType = "string",
                            },
                            PreserveSourceDataTyping = false,
                        },
                    },
                    Salesforce = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceArgs
                    {
                        Object = "string",
                        ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceErrorHandlingConfigArgs
                        {
                            BucketName = "string",
                            BucketPrefix = "string",
                            FailOnFirstDestinationError = false,
                        },
                        IdFieldNames = new[]
                        {
                            "string",
                        },
                        WriteOperationType = "string",
                    },
                    SapoData = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataArgs
                    {
                        ObjectPath = "string",
                        ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfigArgs
                        {
                            BucketName = "string",
                            BucketPrefix = "string",
                            FailOnFirstDestinationError = false,
                        },
                        IdFieldNames = new[]
                        {
                            "string",
                        },
                        SuccessResponseHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfigArgs
                        {
                            BucketName = "string",
                            BucketPrefix = "string",
                        },
                        WriteOperationType = "string",
                    },
                    Snowflake = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeArgs
                    {
                        IntermediateBucketName = "string",
                        Object = "string",
                        BucketPrefix = "string",
                        ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeErrorHandlingConfigArgs
                        {
                            BucketName = "string",
                            BucketPrefix = "string",
                            FailOnFirstDestinationError = false,
                        },
                    },
                    Upsolver = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverArgs
                    {
                        BucketName = "string",
                        S3OutputFormatConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigArgs
                        {
                            PrefixConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfigArgs
                            {
                                PrefixType = "string",
                                PrefixFormat = "string",
                            },
                            AggregationConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfigArgs
                            {
                                AggregationType = "string",
                            },
                            FileType = "string",
                        },
                        BucketPrefix = "string",
                    },
                    Zendesk = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskArgs
                    {
                        Object = "string",
                        ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskErrorHandlingConfigArgs
                        {
                            BucketName = "string",
                            BucketPrefix = "string",
                            FailOnFirstDestinationError = false,
                        },
                        IdFieldNames = new[]
                        {
                            "string",
                        },
                        WriteOperationType = "string",
                    },
                },
                ApiVersion = "string",
                ConnectorProfileName = "string",
            },
        },
        SourceFlowConfig = new Aws.AppFlow.Inputs.FlowSourceFlowConfigArgs
        {
            ConnectorType = "string",
            SourceConnectorProperties = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesArgs
            {
                Amplitude = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesAmplitudeArgs
                {
                    Object = "string",
                },
                CustomConnector = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesCustomConnectorArgs
                {
                    EntityName = "string",
                    CustomProperties = 
                    {
                        { "string", "string" },
                    },
                },
                Datadog = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesDatadogArgs
                {
                    Object = "string",
                },
                Dynatrace = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesDynatraceArgs
                {
                    Object = "string",
                },
                GoogleAnalytics = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalyticsArgs
                {
                    Object = "string",
                },
                InforNexus = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesInforNexusArgs
                {
                    Object = "string",
                },
                Marketo = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesMarketoArgs
                {
                    Object = "string",
                },
                S3 = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesS3Args
                {
                    BucketName = "string",
                    BucketPrefix = "string",
                    S3InputFormatConfig = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfigArgs
                    {
                        S3InputFileType = "string",
                    },
                },
                Salesforce = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesSalesforceArgs
                {
                    Object = "string",
                    EnableDynamicFieldUpdate = false,
                    IncludeDeletedRecords = false,
                },
                SapoData = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesSapoDataArgs
                {
                    ObjectPath = "string",
                },
                ServiceNow = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesServiceNowArgs
                {
                    Object = "string",
                },
                Singular = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesSingularArgs
                {
                    Object = "string",
                },
                Slack = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesSlackArgs
                {
                    Object = "string",
                },
                Trendmicro = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesTrendmicroArgs
                {
                    Object = "string",
                },
                Veeva = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesVeevaArgs
                {
                    Object = "string",
                    DocumentType = "string",
                    IncludeAllVersions = false,
                    IncludeRenditions = false,
                    IncludeSourceFiles = false,
                },
                Zendesk = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesZendeskArgs
                {
                    Object = "string",
                },
            },
            ApiVersion = "string",
            ConnectorProfileName = "string",
            IncrementalPullConfig = new Aws.AppFlow.Inputs.FlowSourceFlowConfigIncrementalPullConfigArgs
            {
                DatetimeTypeFieldName = "string",
            },
        },
        Tasks = new[]
        {
            new Aws.AppFlow.Inputs.FlowTaskArgs
            {
                TaskType = "string",
                ConnectorOperators = new[]
                {
                    new Aws.AppFlow.Inputs.FlowTaskConnectorOperatorArgs
                    {
                        Amplitude = "string",
                        CustomConnector = "string",
                        Datadog = "string",
                        Dynatrace = "string",
                        GoogleAnalytics = "string",
                        InforNexus = "string",
                        Marketo = "string",
                        S3 = "string",
                        Salesforce = "string",
                        SapoData = "string",
                        ServiceNow = "string",
                        Singular = "string",
                        Slack = "string",
                        Trendmicro = "string",
                        Veeva = "string",
                        Zendesk = "string",
                    },
                },
                DestinationField = "string",
                SourceFields = new[]
                {
                    "string",
                },
                TaskProperties = 
                {
                    { "string", "string" },
                },
            },
        },
        TriggerConfig = new Aws.AppFlow.Inputs.FlowTriggerConfigArgs
        {
            TriggerType = "string",
            TriggerProperties = new Aws.AppFlow.Inputs.FlowTriggerConfigTriggerPropertiesArgs
            {
                Scheduled = new Aws.AppFlow.Inputs.FlowTriggerConfigTriggerPropertiesScheduledArgs
                {
                    ScheduleExpression = "string",
                    DataPullMode = "string",
                    FirstExecutionFrom = "string",
                    ScheduleEndTime = "string",
                    ScheduleOffset = 0,
                    ScheduleStartTime = "string",
                    Timezone = "string",
                },
            },
        },
        Description = "string",
        KmsArn = "string",
        Name = "string",
        Tags = 
        {
            { "string", "string" },
        },
    });
    
    Go:

    example, err := appflow.NewFlow(ctx, "flowResource", &appflow.FlowArgs{
    	DestinationFlowConfigs: appflow.FlowDestinationFlowConfigArray{
    		&appflow.FlowDestinationFlowConfigArgs{
    			ConnectorType: pulumi.String("string"),
    			DestinationConnectorProperties: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesArgs{
    				CustomConnector: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorArgs{
    					EntityName: pulumi.String("string"),
    					CustomProperties: pulumi.StringMap{
    						"string": pulumi.String("string"),
    					},
    					ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorErrorHandlingConfigArgs{
    						BucketName:                  pulumi.String("string"),
    						BucketPrefix:                pulumi.String("string"),
    						FailOnFirstDestinationError: pulumi.Bool(false),
    					},
    					IdFieldNames: pulumi.StringArray{
    						pulumi.String("string"),
    					},
    					WriteOperationType: pulumi.String("string"),
    				},
    				CustomerProfiles: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfilesArgs{
    					DomainName:     pulumi.String("string"),
    					ObjectTypeName: pulumi.String("string"),
    				},
    				EventBridge: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeArgs{
    					Object: pulumi.String("string"),
    					ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeErrorHandlingConfigArgs{
    						BucketName:                  pulumi.String("string"),
    						BucketPrefix:                pulumi.String("string"),
    						FailOnFirstDestinationError: pulumi.Bool(false),
    					},
    				},
    				Honeycode: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeArgs{
    					Object: pulumi.String("string"),
    					ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeErrorHandlingConfigArgs{
    						BucketName:                  pulumi.String("string"),
    						BucketPrefix:                pulumi.String("string"),
    						FailOnFirstDestinationError: pulumi.Bool(false),
    					},
    				},
    				LookoutMetrics: nil,
    				Marketo: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoArgs{
    					Object: pulumi.String("string"),
    					ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoErrorHandlingConfigArgs{
    						BucketName:                  pulumi.String("string"),
    						BucketPrefix:                pulumi.String("string"),
    						FailOnFirstDestinationError: pulumi.Bool(false),
    					},
    				},
    				Redshift: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftArgs{
    					IntermediateBucketName: pulumi.String("string"),
    					Object:                 pulumi.String("string"),
    					BucketPrefix:           pulumi.String("string"),
    					ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftErrorHandlingConfigArgs{
    						BucketName:                  pulumi.String("string"),
    						BucketPrefix:                pulumi.String("string"),
    						FailOnFirstDestinationError: pulumi.Bool(false),
    					},
    				},
    				S3: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args{
    					BucketName:   pulumi.String("string"),
    					BucketPrefix: pulumi.String("string"),
    					S3OutputFormatConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs{
    						AggregationConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfigArgs{
    							AggregationType: pulumi.String("string"),
    							TargetFileSize:  pulumi.Int(0),
    						},
    						FileType: pulumi.String("string"),
    						PrefixConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs{
    							PrefixFormat: pulumi.String("string"),
    							PrefixType:   pulumi.String("string"),
    						},
    						PreserveSourceDataTyping: pulumi.Bool(false),
    					},
    				},
    				Salesforce: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceArgs{
    					Object: pulumi.String("string"),
    					ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceErrorHandlingConfigArgs{
    						BucketName:                  pulumi.String("string"),
    						BucketPrefix:                pulumi.String("string"),
    						FailOnFirstDestinationError: pulumi.Bool(false),
    					},
    					IdFieldNames: pulumi.StringArray{
    						pulumi.String("string"),
    					},
    					WriteOperationType: pulumi.String("string"),
    				},
    				SapoData: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataArgs{
    					ObjectPath: pulumi.String("string"),
    					ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfigArgs{
    						BucketName:                  pulumi.String("string"),
    						BucketPrefix:                pulumi.String("string"),
    						FailOnFirstDestinationError: pulumi.Bool(false),
    					},
    					IdFieldNames: pulumi.StringArray{
    						pulumi.String("string"),
    					},
    					SuccessResponseHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfigArgs{
    						BucketName:   pulumi.String("string"),
    						BucketPrefix: pulumi.String("string"),
    					},
    					WriteOperationType: pulumi.String("string"),
    				},
    				Snowflake: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeArgs{
    					IntermediateBucketName: pulumi.String("string"),
    					Object:                 pulumi.String("string"),
    					BucketPrefix:           pulumi.String("string"),
    					ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeErrorHandlingConfigArgs{
    						BucketName:                  pulumi.String("string"),
    						BucketPrefix:                pulumi.String("string"),
    						FailOnFirstDestinationError: pulumi.Bool(false),
    					},
    				},
    				Upsolver: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverArgs{
    					BucketName: pulumi.String("string"),
    					S3OutputFormatConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigArgs{
    						PrefixConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfigArgs{
    							PrefixType:   pulumi.String("string"),
    							PrefixFormat: pulumi.String("string"),
    						},
    						AggregationConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfigArgs{
    							AggregationType: pulumi.String("string"),
    						},
    						FileType: pulumi.String("string"),
    					},
    					BucketPrefix: pulumi.String("string"),
    				},
    				Zendesk: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskArgs{
    					Object: pulumi.String("string"),
    					ErrorHandlingConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskErrorHandlingConfigArgs{
    						BucketName:                  pulumi.String("string"),
    						BucketPrefix:                pulumi.String("string"),
    						FailOnFirstDestinationError: pulumi.Bool(false),
    					},
    					IdFieldNames: pulumi.StringArray{
    						pulumi.String("string"),
    					},
    					WriteOperationType: pulumi.String("string"),
    				},
    			},
    			ApiVersion:           pulumi.String("string"),
    			ConnectorProfileName: pulumi.String("string"),
    		},
    	},
    	SourceFlowConfig: &appflow.FlowSourceFlowConfigArgs{
    		ConnectorType: pulumi.String("string"),
    		SourceConnectorProperties: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesArgs{
    			Amplitude: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesAmplitudeArgs{
    				Object: pulumi.String("string"),
    			},
    			CustomConnector: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesCustomConnectorArgs{
    				EntityName: pulumi.String("string"),
    				CustomProperties: pulumi.StringMap{
    					"string": pulumi.String("string"),
    				},
    			},
    			Datadog: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesDatadogArgs{
    				Object: pulumi.String("string"),
    			},
    			Dynatrace: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesDynatraceArgs{
    				Object: pulumi.String("string"),
    			},
    			GoogleAnalytics: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalyticsArgs{
    				Object: pulumi.String("string"),
    			},
    			InforNexus: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesInforNexusArgs{
    				Object: pulumi.String("string"),
    			},
    			Marketo: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesMarketoArgs{
    				Object: pulumi.String("string"),
    			},
    			S3: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesS3Args{
    				BucketName:   pulumi.String("string"),
    				BucketPrefix: pulumi.String("string"),
    				S3InputFormatConfig: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfigArgs{
    					S3InputFileType: pulumi.String("string"),
    				},
    			},
    			Salesforce: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesSalesforceArgs{
    				Object:                   pulumi.String("string"),
    				EnableDynamicFieldUpdate: pulumi.Bool(false),
    				IncludeDeletedRecords:    pulumi.Bool(false),
    			},
    			SapoData: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesSapoDataArgs{
    				ObjectPath: pulumi.String("string"),
    			},
    			ServiceNow: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesServiceNowArgs{
    				Object: pulumi.String("string"),
    			},
    			Singular: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesSingularArgs{
    				Object: pulumi.String("string"),
    			},
    			Slack: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesSlackArgs{
    				Object: pulumi.String("string"),
    			},
    			Trendmicro: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesTrendmicroArgs{
    				Object: pulumi.String("string"),
    			},
    			Veeva: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesVeevaArgs{
    				Object:             pulumi.String("string"),
    				DocumentType:       pulumi.String("string"),
    				IncludeAllVersions: pulumi.Bool(false),
    				IncludeRenditions:  pulumi.Bool(false),
    				IncludeSourceFiles: pulumi.Bool(false),
    			},
    			Zendesk: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesZendeskArgs{
    				Object: pulumi.String("string"),
    			},
    		},
    		ApiVersion:           pulumi.String("string"),
    		ConnectorProfileName: pulumi.String("string"),
    		IncrementalPullConfig: &appflow.FlowSourceFlowConfigIncrementalPullConfigArgs{
    			DatetimeTypeFieldName: pulumi.String("string"),
    		},
    	},
    	Tasks: appflow.FlowTaskArray{
    		&appflow.FlowTaskArgs{
    			TaskType: pulumi.String("string"),
    			ConnectorOperators: appflow.FlowTaskConnectorOperatorArray{
    				&appflow.FlowTaskConnectorOperatorArgs{
    					Amplitude:       pulumi.String("string"),
    					CustomConnector: pulumi.String("string"),
    					Datadog:         pulumi.String("string"),
    					Dynatrace:       pulumi.String("string"),
    					GoogleAnalytics: pulumi.String("string"),
    					InforNexus:      pulumi.String("string"),
    					Marketo:         pulumi.String("string"),
    					S3:              pulumi.String("string"),
    					Salesforce:      pulumi.String("string"),
    					SapoData:        pulumi.String("string"),
    					ServiceNow:      pulumi.String("string"),
    					Singular:        pulumi.String("string"),
    					Slack:           pulumi.String("string"),
    					Trendmicro:      pulumi.String("string"),
    					Veeva:           pulumi.String("string"),
    					Zendesk:         pulumi.String("string"),
    				},
    			},
    			DestinationField: pulumi.String("string"),
    			SourceFields: pulumi.StringArray{
    				pulumi.String("string"),
    			},
    			TaskProperties: pulumi.StringMap{
    				"string": pulumi.String("string"),
    			},
    		},
    	},
    	TriggerConfig: &appflow.FlowTriggerConfigArgs{
    		TriggerType: pulumi.String("string"),
    		TriggerProperties: &appflow.FlowTriggerConfigTriggerPropertiesArgs{
    			Scheduled: &appflow.FlowTriggerConfigTriggerPropertiesScheduledArgs{
    				ScheduleExpression: pulumi.String("string"),
    				DataPullMode:       pulumi.String("string"),
    				FirstExecutionFrom: pulumi.String("string"),
    				ScheduleEndTime:    pulumi.String("string"),
    				ScheduleOffset:     pulumi.Int(0),
    				ScheduleStartTime:  pulumi.String("string"),
    				Timezone:           pulumi.String("string"),
    			},
    		},
    	},
    	Description: pulumi.String("string"),
    	KmsArn:      pulumi.String("string"),
    	Name:        pulumi.String("string"),
    	Tags: pulumi.StringMap{
    		"string": pulumi.String("string"),
    	},
    })
    
    var flowResource = new Flow("flowResource", FlowArgs.builder()        
        .destinationFlowConfigs(FlowDestinationFlowConfigArgs.builder()
            .connectorType("string")
            .destinationConnectorProperties(FlowDestinationFlowConfigDestinationConnectorPropertiesArgs.builder()
                .customConnector(FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorArgs.builder()
                    .entityName("string")
                    .customProperties(Map.of("string", "string"))
                    .errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorErrorHandlingConfigArgs.builder()
                        .bucketName("string")
                        .bucketPrefix("string")
                        .failOnFirstDestinationError(false)
                        .build())
                    .idFieldNames("string")
                    .writeOperationType("string")
                    .build())
                .customerProfiles(FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfilesArgs.builder()
                    .domainName("string")
                    .objectTypeName("string")
                    .build())
                .eventBridge(FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeArgs.builder()
                    .object("string")
                    .errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeErrorHandlingConfigArgs.builder()
                        .bucketName("string")
                        .bucketPrefix("string")
                        .failOnFirstDestinationError(false)
                        .build())
                    .build())
                .honeycode(FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeArgs.builder()
                    .object("string")
                    .errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeErrorHandlingConfigArgs.builder()
                        .bucketName("string")
                        .bucketPrefix("string")
                        .failOnFirstDestinationError(false)
                        .build())
                    .build())
                .lookoutMetrics()
                .marketo(FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoArgs.builder()
                    .object("string")
                    .errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoErrorHandlingConfigArgs.builder()
                        .bucketName("string")
                        .bucketPrefix("string")
                        .failOnFirstDestinationError(false)
                        .build())
                    .build())
                .redshift(FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftArgs.builder()
                    .intermediateBucketName("string")
                    .object("string")
                    .bucketPrefix("string")
                    .errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftErrorHandlingConfigArgs.builder()
                        .bucketName("string")
                        .bucketPrefix("string")
                        .failOnFirstDestinationError(false)
                        .build())
                    .build())
                .s3(FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args.builder()
                    .bucketName("string")
                    .bucketPrefix("string")
                    .s3OutputFormatConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs.builder()
                        .aggregationConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfigArgs.builder()
                            .aggregationType("string")
                            .targetFileSize(0)
                            .build())
                        .fileType("string")
                        .prefixConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs.builder()
                            .prefixFormat("string")
                            .prefixType("string")
                            .build())
                        .preserveSourceDataTyping(false)
                        .build())
                    .build())
                .salesforce(FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceArgs.builder()
                    .object("string")
                    .errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceErrorHandlingConfigArgs.builder()
                        .bucketName("string")
                        .bucketPrefix("string")
                        .failOnFirstDestinationError(false)
                        .build())
                    .idFieldNames("string")
                    .writeOperationType("string")
                    .build())
                .sapoData(FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataArgs.builder()
                    .objectPath("string")
                    .errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfigArgs.builder()
                        .bucketName("string")
                        .bucketPrefix("string")
                        .failOnFirstDestinationError(false)
                        .build())
                    .idFieldNames("string")
                    .successResponseHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfigArgs.builder()
                        .bucketName("string")
                        .bucketPrefix("string")
                        .build())
                    .writeOperationType("string")
                    .build())
                .snowflake(FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeArgs.builder()
                    .intermediateBucketName("string")
                    .object("string")
                    .bucketPrefix("string")
                    .errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeErrorHandlingConfigArgs.builder()
                        .bucketName("string")
                        .bucketPrefix("string")
                        .failOnFirstDestinationError(false)
                        .build())
                    .build())
                .upsolver(FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverArgs.builder()
                    .bucketName("string")
                    .s3OutputFormatConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigArgs.builder()
                        .prefixConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfigArgs.builder()
                            .prefixType("string")
                            .prefixFormat("string")
                            .build())
                        .aggregationConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfigArgs.builder()
                            .aggregationType("string")
                            .build())
                        .fileType("string")
                        .build())
                    .bucketPrefix("string")
                    .build())
                .zendesk(FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskArgs.builder()
                    .object("string")
                    .errorHandlingConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskErrorHandlingConfigArgs.builder()
                        .bucketName("string")
                        .bucketPrefix("string")
                        .failOnFirstDestinationError(false)
                        .build())
                    .idFieldNames("string")
                    .writeOperationType("string")
                    .build())
                .build())
            .apiVersion("string")
            .connectorProfileName("string")
            .build())
        .sourceFlowConfig(FlowSourceFlowConfigArgs.builder()
            .connectorType("string")
            .sourceConnectorProperties(FlowSourceFlowConfigSourceConnectorPropertiesArgs.builder()
                .amplitude(FlowSourceFlowConfigSourceConnectorPropertiesAmplitudeArgs.builder()
                    .object("string")
                    .build())
                .customConnector(FlowSourceFlowConfigSourceConnectorPropertiesCustomConnectorArgs.builder()
                    .entityName("string")
                    .customProperties(Map.of("string", "string"))
                    .build())
                .datadog(FlowSourceFlowConfigSourceConnectorPropertiesDatadogArgs.builder()
                    .object("string")
                    .build())
                .dynatrace(FlowSourceFlowConfigSourceConnectorPropertiesDynatraceArgs.builder()
                    .object("string")
                    .build())
                .googleAnalytics(FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalyticsArgs.builder()
                    .object("string")
                    .build())
                .inforNexus(FlowSourceFlowConfigSourceConnectorPropertiesInforNexusArgs.builder()
                    .object("string")
                    .build())
                .marketo(FlowSourceFlowConfigSourceConnectorPropertiesMarketoArgs.builder()
                    .object("string")
                    .build())
                .s3(FlowSourceFlowConfigSourceConnectorPropertiesS3Args.builder()
                    .bucketName("string")
                    .bucketPrefix("string")
                    .s3InputFormatConfig(FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfigArgs.builder()
                        .s3InputFileType("string")
                        .build())
                    .build())
                .salesforce(FlowSourceFlowConfigSourceConnectorPropertiesSalesforceArgs.builder()
                    .object("string")
                    .enableDynamicFieldUpdate(false)
                    .includeDeletedRecords(false)
                    .build())
                .sapoData(FlowSourceFlowConfigSourceConnectorPropertiesSapoDataArgs.builder()
                    .objectPath("string")
                    .build())
                .serviceNow(FlowSourceFlowConfigSourceConnectorPropertiesServiceNowArgs.builder()
                    .object("string")
                    .build())
                .singular(FlowSourceFlowConfigSourceConnectorPropertiesSingularArgs.builder()
                    .object("string")
                    .build())
                .slack(FlowSourceFlowConfigSourceConnectorPropertiesSlackArgs.builder()
                    .object("string")
                    .build())
                .trendmicro(FlowSourceFlowConfigSourceConnectorPropertiesTrendmicroArgs.builder()
                    .object("string")
                    .build())
                .veeva(FlowSourceFlowConfigSourceConnectorPropertiesVeevaArgs.builder()
                    .object("string")
                    .documentType("string")
                    .includeAllVersions(false)
                    .includeRenditions(false)
                    .includeSourceFiles(false)
                    .build())
                .zendesk(FlowSourceFlowConfigSourceConnectorPropertiesZendeskArgs.builder()
                    .object("string")
                    .build())
                .build())
            .apiVersion("string")
            .connectorProfileName("string")
            .incrementalPullConfig(FlowSourceFlowConfigIncrementalPullConfigArgs.builder()
                .datetimeTypeFieldName("string")
                .build())
            .build())
        .tasks(FlowTaskArgs.builder()
            .taskType("string")
            .connectorOperators(FlowTaskConnectorOperatorArgs.builder()
                .amplitude("string")
                .customConnector("string")
                .datadog("string")
                .dynatrace("string")
                .googleAnalytics("string")
                .inforNexus("string")
                .marketo("string")
                .s3("string")
                .salesforce("string")
                .sapoData("string")
                .serviceNow("string")
                .singular("string")
                .slack("string")
                .trendmicro("string")
                .veeva("string")
                .zendesk("string")
                .build())
            .destinationField("string")
            .sourceFields("string")
            .taskProperties(Map.of("string", "string"))
            .build())
        .triggerConfig(FlowTriggerConfigArgs.builder()
            .triggerType("string")
            .triggerProperties(FlowTriggerConfigTriggerPropertiesArgs.builder()
                .scheduled(FlowTriggerConfigTriggerPropertiesScheduledArgs.builder()
                    .scheduleExpression("string")
                    .dataPullMode("string")
                    .firstExecutionFrom("string")
                    .scheduleEndTime("string")
                    .scheduleOffset(0)
                    .scheduleStartTime("string")
                    .timezone("string")
                    .build())
                .build())
            .build())
        .description("string")
        .kmsArn("string")
        .name("string")
        .tags(Map.of("string", "string"))
        .build());
    
    flow_resource = aws.appflow.Flow("flowResource",
        destination_flow_configs=[aws.appflow.FlowDestinationFlowConfigArgs(
            connector_type="string",
            destination_connector_properties=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesArgs(
                custom_connector=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorArgs(
                    entity_name="string",
                    custom_properties={
                        "string": "string",
                    },
                    error_handling_config=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorErrorHandlingConfigArgs(
                        bucket_name="string",
                        bucket_prefix="string",
                        fail_on_first_destination_error=False,
                    ),
                    id_field_names=["string"],
                    write_operation_type="string",
                ),
                customer_profiles=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfilesArgs(
                    domain_name="string",
                    object_type_name="string",
                ),
                event_bridge=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeArgs(
                    object="string",
                    error_handling_config=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeErrorHandlingConfigArgs(
                        bucket_name="string",
                        bucket_prefix="string",
                        fail_on_first_destination_error=False,
                    ),
                ),
                honeycode=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeArgs(
                    object="string",
                    error_handling_config=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeErrorHandlingConfigArgs(
                        bucket_name="string",
                        bucket_prefix="string",
                        fail_on_first_destination_error=False,
                    ),
                ),
                lookout_metrics=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesLookoutMetricsArgs(),
                marketo=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoArgs(
                    object="string",
                    error_handling_config=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoErrorHandlingConfigArgs(
                        bucket_name="string",
                        bucket_prefix="string",
                        fail_on_first_destination_error=False,
                    ),
                ),
                redshift=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftArgs(
                    intermediate_bucket_name="string",
                    object="string",
                    bucket_prefix="string",
                    error_handling_config=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftErrorHandlingConfigArgs(
                        bucket_name="string",
                        bucket_prefix="string",
                        fail_on_first_destination_error=False,
                    ),
                ),
                s3=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args(
                    bucket_name="string",
                    bucket_prefix="string",
                    s3_output_format_config=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs(
                        aggregation_config=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfigArgs(
                            aggregation_type="string",
                            target_file_size=0,
                        ),
                        file_type="string",
                        prefix_config=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs(
                            prefix_format="string",
                            prefix_type="string",
                        ),
                        preserve_source_data_typing=False,
                    ),
                ),
                salesforce=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceArgs(
                    object="string",
                    error_handling_config=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceErrorHandlingConfigArgs(
                        bucket_name="string",
                        bucket_prefix="string",
                        fail_on_first_destination_error=False,
                    ),
                    id_field_names=["string"],
                    write_operation_type="string",
                ),
                sapo_data=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataArgs(
                    object_path="string",
                    error_handling_config=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfigArgs(
                        bucket_name="string",
                        bucket_prefix="string",
                        fail_on_first_destination_error=False,
                    ),
                    id_field_names=["string"],
                    success_response_handling_config=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfigArgs(
                        bucket_name="string",
                        bucket_prefix="string",
                    ),
                    write_operation_type="string",
                ),
                snowflake=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeArgs(
                    intermediate_bucket_name="string",
                    object="string",
                    bucket_prefix="string",
                    error_handling_config=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeErrorHandlingConfigArgs(
                        bucket_name="string",
                        bucket_prefix="string",
                        fail_on_first_destination_error=False,
                    ),
                ),
                upsolver=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverArgs(
                    bucket_name="string",
                    s3_output_format_config=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigArgs(
                        prefix_config=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfigArgs(
                            prefix_type="string",
                            prefix_format="string",
                        ),
                        aggregation_config=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfigArgs(
                            aggregation_type="string",
                        ),
                        file_type="string",
                    ),
                    bucket_prefix="string",
                ),
                zendesk=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskArgs(
                    object="string",
                    error_handling_config=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskErrorHandlingConfigArgs(
                        bucket_name="string",
                        bucket_prefix="string",
                        fail_on_first_destination_error=False,
                    ),
                    id_field_names=["string"],
                    write_operation_type="string",
                ),
            ),
            api_version="string",
            connector_profile_name="string",
        )],
        source_flow_config=aws.appflow.FlowSourceFlowConfigArgs(
            connector_type="string",
            source_connector_properties=aws.appflow.FlowSourceFlowConfigSourceConnectorPropertiesArgs(
                amplitude=aws.appflow.FlowSourceFlowConfigSourceConnectorPropertiesAmplitudeArgs(
                    object="string",
                ),
                custom_connector=aws.appflow.FlowSourceFlowConfigSourceConnectorPropertiesCustomConnectorArgs(
                    entity_name="string",
                    custom_properties={
                        "string": "string",
                    },
                ),
                datadog=aws.appflow.FlowSourceFlowConfigSourceConnectorPropertiesDatadogArgs(
                    object="string",
                ),
                dynatrace=aws.appflow.FlowSourceFlowConfigSourceConnectorPropertiesDynatraceArgs(
                    object="string",
                ),
                google_analytics=aws.appflow.FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalyticsArgs(
                    object="string",
                ),
                infor_nexus=aws.appflow.FlowSourceFlowConfigSourceConnectorPropertiesInforNexusArgs(
                    object="string",
                ),
                marketo=aws.appflow.FlowSourceFlowConfigSourceConnectorPropertiesMarketoArgs(
                    object="string",
                ),
                s3=aws.appflow.FlowSourceFlowConfigSourceConnectorPropertiesS3Args(
                    bucket_name="string",
                    bucket_prefix="string",
                    s3_input_format_config=aws.appflow.FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfigArgs(
                        s3_input_file_type="string",
                    ),
                ),
                salesforce=aws.appflow.FlowSourceFlowConfigSourceConnectorPropertiesSalesforceArgs(
                    object="string",
                    enable_dynamic_field_update=False,
                    include_deleted_records=False,
                ),
                sapo_data=aws.appflow.FlowSourceFlowConfigSourceConnectorPropertiesSapoDataArgs(
                    object_path="string",
                ),
                service_now=aws.appflow.FlowSourceFlowConfigSourceConnectorPropertiesServiceNowArgs(
                    object="string",
                ),
                singular=aws.appflow.FlowSourceFlowConfigSourceConnectorPropertiesSingularArgs(
                    object="string",
                ),
                slack=aws.appflow.FlowSourceFlowConfigSourceConnectorPropertiesSlackArgs(
                    object="string",
                ),
                trendmicro=aws.appflow.FlowSourceFlowConfigSourceConnectorPropertiesTrendmicroArgs(
                    object="string",
                ),
                veeva=aws.appflow.FlowSourceFlowConfigSourceConnectorPropertiesVeevaArgs(
                    object="string",
                    document_type="string",
                    include_all_versions=False,
                    include_renditions=False,
                    include_source_files=False,
                ),
                zendesk=aws.appflow.FlowSourceFlowConfigSourceConnectorPropertiesZendeskArgs(
                    object="string",
                ),
            ),
            api_version="string",
            connector_profile_name="string",
            incremental_pull_config=aws.appflow.FlowSourceFlowConfigIncrementalPullConfigArgs(
                datetime_type_field_name="string",
            ),
        ),
        tasks=[aws.appflow.FlowTaskArgs(
            task_type="string",
            connector_operators=[aws.appflow.FlowTaskConnectorOperatorArgs(
                amplitude="string",
                custom_connector="string",
                datadog="string",
                dynatrace="string",
                google_analytics="string",
                infor_nexus="string",
                marketo="string",
                s3="string",
                salesforce="string",
                sapo_data="string",
                service_now="string",
                singular="string",
                slack="string",
                trendmicro="string",
                veeva="string",
                zendesk="string",
            )],
            destination_field="string",
            source_fields=["string"],
            task_properties={
                "string": "string",
            },
        )],
        trigger_config=aws.appflow.FlowTriggerConfigArgs(
            trigger_type="string",
            trigger_properties=aws.appflow.FlowTriggerConfigTriggerPropertiesArgs(
                scheduled=aws.appflow.FlowTriggerConfigTriggerPropertiesScheduledArgs(
                    schedule_expression="string",
                    data_pull_mode="string",
                    first_execution_from="string",
                    schedule_end_time="string",
                    schedule_offset=0,
                    schedule_start_time="string",
                    timezone="string",
                ),
            ),
        ),
        description="string",
        kms_arn="string",
        name="string",
        tags={
            "string": "string",
        })
    
    const flowResource = new aws.appflow.Flow("flowResource", {
        destinationFlowConfigs: [{
            connectorType: "string",
            destinationConnectorProperties: {
                customConnector: {
                    entityName: "string",
                    customProperties: {
                        string: "string",
                    },
                    errorHandlingConfig: {
                        bucketName: "string",
                        bucketPrefix: "string",
                        failOnFirstDestinationError: false,
                    },
                    idFieldNames: ["string"],
                    writeOperationType: "string",
                },
                customerProfiles: {
                    domainName: "string",
                    objectTypeName: "string",
                },
                eventBridge: {
                    object: "string",
                    errorHandlingConfig: {
                        bucketName: "string",
                        bucketPrefix: "string",
                        failOnFirstDestinationError: false,
                    },
                },
                honeycode: {
                    object: "string",
                    errorHandlingConfig: {
                        bucketName: "string",
                        bucketPrefix: "string",
                        failOnFirstDestinationError: false,
                    },
                },
                lookoutMetrics: {},
                marketo: {
                    object: "string",
                    errorHandlingConfig: {
                        bucketName: "string",
                        bucketPrefix: "string",
                        failOnFirstDestinationError: false,
                    },
                },
                redshift: {
                    intermediateBucketName: "string",
                    object: "string",
                    bucketPrefix: "string",
                    errorHandlingConfig: {
                        bucketName: "string",
                        bucketPrefix: "string",
                        failOnFirstDestinationError: false,
                    },
                },
                s3: {
                    bucketName: "string",
                    bucketPrefix: "string",
                    s3OutputFormatConfig: {
                        aggregationConfig: {
                            aggregationType: "string",
                            targetFileSize: 0,
                        },
                        fileType: "string",
                        prefixConfig: {
                            prefixFormat: "string",
                            prefixType: "string",
                        },
                        preserveSourceDataTyping: false,
                    },
                },
                salesforce: {
                    object: "string",
                    errorHandlingConfig: {
                        bucketName: "string",
                        bucketPrefix: "string",
                        failOnFirstDestinationError: false,
                    },
                    idFieldNames: ["string"],
                    writeOperationType: "string",
                },
                sapoData: {
                    objectPath: "string",
                    errorHandlingConfig: {
                        bucketName: "string",
                        bucketPrefix: "string",
                        failOnFirstDestinationError: false,
                    },
                    idFieldNames: ["string"],
                    successResponseHandlingConfig: {
                        bucketName: "string",
                        bucketPrefix: "string",
                    },
                    writeOperationType: "string",
                },
                snowflake: {
                    intermediateBucketName: "string",
                    object: "string",
                    bucketPrefix: "string",
                    errorHandlingConfig: {
                        bucketName: "string",
                        bucketPrefix: "string",
                        failOnFirstDestinationError: false,
                    },
                },
                upsolver: {
                    bucketName: "string",
                    s3OutputFormatConfig: {
                        prefixConfig: {
                            prefixType: "string",
                            prefixFormat: "string",
                        },
                        aggregationConfig: {
                            aggregationType: "string",
                        },
                        fileType: "string",
                    },
                    bucketPrefix: "string",
                },
                zendesk: {
                    object: "string",
                    errorHandlingConfig: {
                        bucketName: "string",
                        bucketPrefix: "string",
                        failOnFirstDestinationError: false,
                    },
                    idFieldNames: ["string"],
                    writeOperationType: "string",
                },
            },
            apiVersion: "string",
            connectorProfileName: "string",
        }],
        sourceFlowConfig: {
            connectorType: "string",
            sourceConnectorProperties: {
                amplitude: {
                    object: "string",
                },
                customConnector: {
                    entityName: "string",
                    customProperties: {
                        string: "string",
                    },
                },
                datadog: {
                    object: "string",
                },
                dynatrace: {
                    object: "string",
                },
                googleAnalytics: {
                    object: "string",
                },
                inforNexus: {
                    object: "string",
                },
                marketo: {
                    object: "string",
                },
                s3: {
                    bucketName: "string",
                    bucketPrefix: "string",
                    s3InputFormatConfig: {
                        s3InputFileType: "string",
                    },
                },
                salesforce: {
                    object: "string",
                    enableDynamicFieldUpdate: false,
                    includeDeletedRecords: false,
                },
                sapoData: {
                    objectPath: "string",
                },
                serviceNow: {
                    object: "string",
                },
                singular: {
                    object: "string",
                },
                slack: {
                    object: "string",
                },
                trendmicro: {
                    object: "string",
                },
                veeva: {
                    object: "string",
                    documentType: "string",
                    includeAllVersions: false,
                    includeRenditions: false,
                    includeSourceFiles: false,
                },
                zendesk: {
                    object: "string",
                },
            },
            apiVersion: "string",
            connectorProfileName: "string",
            incrementalPullConfig: {
                datetimeTypeFieldName: "string",
            },
        },
        tasks: [{
            taskType: "string",
            connectorOperators: [{
                amplitude: "string",
                customConnector: "string",
                datadog: "string",
                dynatrace: "string",
                googleAnalytics: "string",
                inforNexus: "string",
                marketo: "string",
                s3: "string",
                salesforce: "string",
                sapoData: "string",
                serviceNow: "string",
                singular: "string",
                slack: "string",
                trendmicro: "string",
                veeva: "string",
                zendesk: "string",
            }],
            destinationField: "string",
            sourceFields: ["string"],
            taskProperties: {
                string: "string",
            },
        }],
        triggerConfig: {
            triggerType: "string",
            triggerProperties: {
                scheduled: {
                    scheduleExpression: "string",
                    dataPullMode: "string",
                    firstExecutionFrom: "string",
                    scheduleEndTime: "string",
                    scheduleOffset: 0,
                    scheduleStartTime: "string",
                    timezone: "string",
                },
            },
        },
        description: "string",
        kmsArn: "string",
        name: "string",
        tags: {
            string: "string",
        },
    });
    
    type: aws:appflow:Flow
    properties:
        description: string
        destinationFlowConfigs:
            - apiVersion: string
              connectorProfileName: string
              connectorType: string
              destinationConnectorProperties:
                customConnector:
                    customProperties:
                        string: string
                    entityName: string
                    errorHandlingConfig:
                        bucketName: string
                        bucketPrefix: string
                        failOnFirstDestinationError: false
                    idFieldNames:
                        - string
                    writeOperationType: string
                customerProfiles:
                    domainName: string
                    objectTypeName: string
                eventBridge:
                    errorHandlingConfig:
                        bucketName: string
                        bucketPrefix: string
                        failOnFirstDestinationError: false
                    object: string
                honeycode:
                    errorHandlingConfig:
                        bucketName: string
                        bucketPrefix: string
                        failOnFirstDestinationError: false
                    object: string
                lookoutMetrics: {}
                marketo:
                    errorHandlingConfig:
                        bucketName: string
                        bucketPrefix: string
                        failOnFirstDestinationError: false
                    object: string
                redshift:
                    bucketPrefix: string
                    errorHandlingConfig:
                        bucketName: string
                        bucketPrefix: string
                        failOnFirstDestinationError: false
                    intermediateBucketName: string
                    object: string
                s3:
                    bucketName: string
                    bucketPrefix: string
                    s3OutputFormatConfig:
                        aggregationConfig:
                            aggregationType: string
                            targetFileSize: 0
                        fileType: string
                        prefixConfig:
                            prefixFormat: string
                            prefixType: string
                        preserveSourceDataTyping: false
                salesforce:
                    errorHandlingConfig:
                        bucketName: string
                        bucketPrefix: string
                        failOnFirstDestinationError: false
                    idFieldNames:
                        - string
                    object: string
                    writeOperationType: string
                sapoData:
                    errorHandlingConfig:
                        bucketName: string
                        bucketPrefix: string
                        failOnFirstDestinationError: false
                    idFieldNames:
                        - string
                    objectPath: string
                    successResponseHandlingConfig:
                        bucketName: string
                        bucketPrefix: string
                    writeOperationType: string
                snowflake:
                    bucketPrefix: string
                    errorHandlingConfig:
                        bucketName: string
                        bucketPrefix: string
                        failOnFirstDestinationError: false
                    intermediateBucketName: string
                    object: string
                upsolver:
                    bucketName: string
                    bucketPrefix: string
                    s3OutputFormatConfig:
                        aggregationConfig:
                            aggregationType: string
                        fileType: string
                        prefixConfig:
                            prefixFormat: string
                            prefixType: string
                zendesk:
                    errorHandlingConfig:
                        bucketName: string
                        bucketPrefix: string
                        failOnFirstDestinationError: false
                    idFieldNames:
                        - string
                    object: string
                    writeOperationType: string
        kmsArn: string
        name: string
        sourceFlowConfig:
            apiVersion: string
            connectorProfileName: string
            connectorType: string
            incrementalPullConfig:
                datetimeTypeFieldName: string
            sourceConnectorProperties:
                amplitude:
                    object: string
                customConnector:
                    customProperties:
                        string: string
                    entityName: string
                datadog:
                    object: string
                dynatrace:
                    object: string
                googleAnalytics:
                    object: string
                inforNexus:
                    object: string
                marketo:
                    object: string
                s3:
                    bucketName: string
                    bucketPrefix: string
                    s3InputFormatConfig:
                        s3InputFileType: string
                salesforce:
                    enableDynamicFieldUpdate: false
                    includeDeletedRecords: false
                    object: string
                sapoData:
                    objectPath: string
                serviceNow:
                    object: string
                singular:
                    object: string
                slack:
                    object: string
                trendmicro:
                    object: string
                veeva:
                    documentType: string
                    includeAllVersions: false
                    includeRenditions: false
                    includeSourceFiles: false
                    object: string
                zendesk:
                    object: string
        tags:
            string: string
        tasks:
            - connectorOperators:
                - amplitude: string
                  customConnector: string
                  datadog: string
                  dynatrace: string
                  googleAnalytics: string
                  inforNexus: string
                  marketo: string
                  s3: string
                  salesforce: string
                  sapoData: string
                  serviceNow: string
                  singular: string
                  slack: string
                  trendmicro: string
                  veeva: string
                  zendesk: string
              destinationField: string
              sourceFields:
                - string
              taskProperties:
                string: string
              taskType: string
        triggerConfig:
            triggerProperties:
                scheduled:
                    dataPullMode: string
                    firstExecutionFrom: string
                    scheduleEndTime: string
                    scheduleExpression: string
                    scheduleOffset: 0
                    scheduleStartTime: string
                    timezone: string
            triggerType: string
    

    Flow Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    The Flow resource accepts the following input properties; a short TypeScript sketch of a scheduled TriggerConfig follows the property lists:

    DestinationFlowConfigs List<FlowDestinationFlowConfig>
    A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
    SourceFlowConfig FlowSourceFlowConfig
    The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
    Tasks List<FlowTask>
    A Task that Amazon AppFlow performs while transferring the data in the flow run.
    TriggerConfig FlowTriggerConfig
    A Trigger that determines how and when the flow runs.
    Description string
    Description of the flow you want to create.
    KmsArn string
    ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
    Name string
    Name of the flow.
    Tags Dictionary<string, string>
    Key-value mapping of resource tags. If the provider's default_tags configuration block is present, tags with matching keys will overwrite those defined at the provider level.
    DestinationFlowConfigs []FlowDestinationFlowConfigArgs
    A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
    SourceFlowConfig FlowSourceFlowConfigArgs
    The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
    Tasks []FlowTaskArgs
    A Task that Amazon AppFlow performs while transferring the data in the flow run.
    TriggerConfig FlowTriggerConfigArgs
    A Trigger that determines how and when the flow runs.
    Description string
    Description of the flow you want to create.
    KmsArn string
    ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
    Name string
    Name of the flow.
    Tags map[string]string
    Key-value mapping of resource tags. If the provider's default_tags configuration block is present, tags with matching keys will overwrite those defined at the provider level.
    destinationFlowConfigs List<FlowDestinationFlowConfig>
    A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
    sourceFlowConfig FlowSourceFlowConfig
    The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
    tasks List<FlowTask>
    A Task that Amazon AppFlow performs while transferring the data in the flow run.
    triggerConfig FlowTriggerConfig
    A Trigger that determines how and when the flow runs.
    description String
    Description of the flow you want to create.
    kmsArn String
    ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
    name String
    Name of the flow.
    tags Map<String,String>
    Key-value mapping of resource tags. If the provider's default_tags configuration block is present, tags with matching keys will overwrite those defined at the provider level.
    destinationFlowConfigs FlowDestinationFlowConfig[]
    A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
    sourceFlowConfig FlowSourceFlowConfig
    The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
    tasks FlowTask[]
    A Task that Amazon AppFlow performs while transferring the data in the flow run.
    triggerConfig FlowTriggerConfig
    A Trigger that determines how and when the flow runs.
    description string
    Description of the flow you want to create.
    kmsArn string
    ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
    name string
    Name of the flow.
    tags {[key: string]: string}
    Key-value mapping of resource tags. If the provider's default_tags configuration block is present, tags with matching keys will overwrite those defined at the provider level.
    destination_flow_configs Sequence[FlowDestinationFlowConfigArgs]
    A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
    source_flow_config FlowSourceFlowConfigArgs
    The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
    tasks Sequence[FlowTaskArgs]
    A Task that Amazon AppFlow performs while transferring the data in the flow run.
    trigger_config FlowTriggerConfigArgs
    A Trigger that determines how and when the flow runs.
    description str
    Description of the flow you want to create.
    kms_arn str
    ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
    name str
    Name of the flow.
    tags Mapping[str, str]
    Key-value mapping of resource tags. If the provider's default_tags configuration block is present, tags with matching keys will overwrite those defined at the provider level.
    destinationFlowConfigs List<Property Map>
    A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
    sourceFlowConfig Property Map
    The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
    tasks List<Property Map>
    A Task that Amazon AppFlow performs while transferring the data in the flow run.
    triggerConfig Property Map
    A Trigger that determines how and when the flow runs.
    description String
    Description of the flow you want to create.
    kmsArn String
    ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
    name String
    Name of the flow.
    tags Map<String>
    Key-value mapping of resource tags. If the provider's default_tags configuration block is present, tags with matching keys will overwrite those defined at the provider level.
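
    As a concrete illustration of the TriggerConfig input listed above, here is a minimal sketch of a scheduled trigger in TypeScript. The schedule expression and pull mode are placeholder values chosen for illustration, and the type path follows the Node.js SDK's types module; consult the AWS AppFlow documentation for the exact schedule-expression formats your connectors support.

    import * as aws from "@pulumi/aws";

    // A minimal sketch of a scheduled trigger configuration.
    // "rate(1hours)" and "Incremental" are illustrative placeholder values.
    const scheduledTrigger: aws.types.input.appflow.FlowTriggerConfig = {
        triggerType: "Scheduled",
        triggerProperties: {
            scheduled: {
                scheduleExpression: "rate(1hours)",
                dataPullMode: "Incremental",
            },
        },
    };

    An object shaped like this can then be passed as the triggerConfig argument when constructing aws.appflow.Flow.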

    Outputs

    All input properties are implicitly available as output properties. Additionally, the Flow resource produces the following output properties (a short usage sketch follows the lists):

    Arn string
    Flow's ARN.
    FlowStatus string
    The current status of the flow.
    Id string
    The provider-assigned unique ID for this managed resource.
    TagsAll Dictionary<string, string>
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    Arn string
    Flow's ARN.
    FlowStatus string
    The current status of the flow.
    Id string
    The provider-assigned unique ID for this managed resource.
    TagsAll map[string]string
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    arn String
    Flow's ARN.
    flowStatus String
    The current status of the flow.
    id String
    The provider-assigned unique ID for this managed resource.
    tagsAll Map<String,String>
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    arn string
    Flow's ARN.
    flowStatus string
    The current status of the flow.
    id string
    The provider-assigned unique ID for this managed resource.
    tagsAll {[key: string]: string}
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    arn str
    Flow's ARN.
    flow_status str
    The current status of the flow.
    id str
    The provider-assigned unique ID for this managed resource.
    tags_all Mapping[str, str]
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    arn String
    Flow's ARN.
    flowStatus String
    The current status of the flow.
    id String
    The provider-assigned unique ID for this managed resource.
    tagsAll Map<String>
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.
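
    As a minimal sketch of consuming these outputs in TypeScript, assuming a Flow resource is already declared elsewhere in the program (the declare line below is a stand-in for any flow you have defined, such as the one in the example usage above):

    import * as aws from "@pulumi/aws";
    
    // Stand-in for a Flow declared elsewhere in this program.
    declare const flow: aws.appflow.Flow;
    
    // Every input property is echoed back as an output; these are the
    // additional computed properties.
    export const flowArn = flow.arn;           // the flow's ARN
    export const flowStatus = flow.flowStatus; // current status reported by AppFlow
    export const flowTagsAll = flow.tagsAll;   // tags merged with provider default_tags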

    Look up Existing Flow Resource

    Get an existing Flow resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: FlowState, opts?: CustomResourceOptions): Flow
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            arn: Optional[str] = None,
            description: Optional[str] = None,
            destination_flow_configs: Optional[Sequence[FlowDestinationFlowConfigArgs]] = None,
            flow_status: Optional[str] = None,
            kms_arn: Optional[str] = None,
            name: Optional[str] = None,
            source_flow_config: Optional[FlowSourceFlowConfigArgs] = None,
            tags: Optional[Mapping[str, str]] = None,
            tags_all: Optional[Mapping[str, str]] = None,
            tasks: Optional[Sequence[FlowTaskArgs]] = None,
            trigger_config: Optional[FlowTriggerConfigArgs] = None) -> Flow
    func GetFlow(ctx *Context, name string, id IDInput, state *FlowState, opts ...ResourceOption) (*Flow, error)
    public static Flow Get(string name, Input<string> id, FlowState? state, CustomResourceOptions? opts = null)
    public static Flow get(String name, Output<String> id, FlowState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
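
    A hedged TypeScript sketch of such a lookup; the logical name and the ID are placeholders, and the ID is assumed to be the flow's ARN (the same value used when importing the resource):

    import * as aws from "@pulumi/aws";
    
    // Look up a flow that already exists outside this program.
    const existing = aws.appflow.Flow.get(
        "existing-flow",
        "arn:aws:appflow:us-east-1:123456789012:flow/example",
    );
    
    export const existingFlowStatus = existing.flowStatus;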
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    Arn string
    Flow's ARN.
    Description string
    Description of the flow you want to create.
    DestinationFlowConfigs List<FlowDestinationFlowConfig>
    A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
    FlowStatus string
    The current status of the flow.
    KmsArn string
    ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
    Name string
    Name of the flow.
    SourceFlowConfig FlowSourceFlowConfig
    The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
    Tags Dictionary<string, string>
    Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
    TagsAll Dictionary<string, string>
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    Tasks List<FlowTask>
    A Task that Amazon AppFlow performs while transferring the data in the flow run.
    TriggerConfig FlowTriggerConfig
    A Trigger that determines how and when the flow runs.
    Arn string
    Flow's ARN.
    Description string
    Description of the flow you want to create.
    DestinationFlowConfigs []FlowDestinationFlowConfigArgs
    A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
    FlowStatus string
    The current status of the flow.
    KmsArn string
    ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
    Name string
    Name of the flow.
    SourceFlowConfig FlowSourceFlowConfigArgs
    The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
    Tags map[string]string
    Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
    TagsAll map[string]string
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    Tasks []FlowTaskArgs
    A Task that Amazon AppFlow performs while transferring the data in the flow run.
    TriggerConfig FlowTriggerConfigArgs
    A Trigger that determines how and when the flow runs.
    arn String
    Flow's ARN.
    description String
    Description of the flow you want to create.
    destinationFlowConfigs List<FlowDestinationFlowConfig>
    A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
    flowStatus String
    The current status of the flow.
    kmsArn String
    ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
    name String
    Name of the flow.
    sourceFlowConfig FlowSourceFlowConfig
    The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
    tags Map<String,String>
    Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
    tagsAll Map<String,String>
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    tasks List<FlowTask>
    A Task that Amazon AppFlow performs while transferring the data in the flow run.
    triggerConfig FlowTriggerConfig
    A Trigger that determines how and when the flow runs.
    arn string
    Flow's ARN.
    description string
    Description of the flow you want to create.
    destinationFlowConfigs FlowDestinationFlowConfig[]
    A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
    flowStatus string
    The current status of the flow.
    kmsArn string
    ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
    name string
    Name of the flow.
    sourceFlowConfig FlowSourceFlowConfig
    The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
    tags {[key: string]: string}
    Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
    tagsAll {[key: string]: string}
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    tasks FlowTask[]
    A Task that Amazon AppFlow performs while transferring the data in the flow run.
    triggerConfig FlowTriggerConfig
    A Trigger that determines how and when the flow runs.
    arn str
    Flow's ARN.
    description str
    Description of the flow you want to create.
    destination_flow_configs Sequence[FlowDestinationFlowConfigArgs]
    A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
    flow_status str
    The current status of the flow.
    kms_arn str
    ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
    name str
    Name of the flow.
    source_flow_config FlowSourceFlowConfigArgs
    The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
    tags Mapping[str, str]
    Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
    tags_all Mapping[str, str]
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    tasks Sequence[FlowTaskArgs]
    A Task that Amazon AppFlow performs while transferring the data in the flow run.
    trigger_config FlowTriggerConfigArgs
    A Trigger that determines how and when the flow runs.
    arn String
    Flow's ARN.
    description String
    Description of the flow you want to create.
    destinationFlowConfigs List<Property Map>
    A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
    flowStatus String
    The current status of the flow.
    kmsArn String
    ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
    name String
    Name of the flow.
    sourceFlowConfig Property Map
    The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
    tags Map<String>
    Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
    tagsAll Map<String>
    Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.

    Deprecated: Please use tags instead.

    tasks List<Property Map>
    A Task that Amazon AppFlow performs while transferring the data in the flow run.
    triggerConfig Property Map
    A Trigger that determines how and when the flow runs.

    Supporting Types

    FlowDestinationFlowConfig, FlowDestinationFlowConfigArgs

    ConnectorType string
    Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
    DestinationConnectorProperties FlowDestinationFlowConfigDestinationConnectorProperties
    This stores the information that is required to query a particular connector. See Destination Connector Properties for more information.
    ApiVersion string
    API version that the destination connector uses.
    ConnectorProfileName string
    Name of the connector profile. This name must be unique for each connector profile in the AWS account.
    ConnectorType string
    Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
    DestinationConnectorProperties FlowDestinationFlowConfigDestinationConnectorProperties
    This stores the information that is required to query a particular connector. See Destination Connector Properties for more information.
    ApiVersion string
    API version that the destination connector uses.
    ConnectorProfileName string
    Name of the connector profile. This name must be unique for each connector profile in the AWS account.
    connectorType String
    Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
    destinationConnectorProperties FlowDestinationFlowConfigDestinationConnectorProperties
    This stores the information that is required to query a particular connector. See Destination Connector Properties for more information.
    apiVersion String
    API version that the destination connector uses.
    connectorProfileName String
    Name of the connector profile. This name must be unique for each connector profile in the AWS account.
    connectorType string
    Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
    destinationConnectorProperties FlowDestinationFlowConfigDestinationConnectorProperties
    This stores the information that is required to query a particular connector. See Destination Connector Properties for more information.
    apiVersion string
    API version that the destination connector uses.
    connectorProfileName string
    Name of the connector profile. This name must be unique for each connector profile in the AWS account.
    connector_type str
    Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
    destination_connector_properties FlowDestinationFlowConfigDestinationConnectorProperties
    This stores the information that is required to query a particular connector. See Destination Connector Properties for more information.
    api_version str
    API version that the destination connector uses.
    connector_profile_name str
    Name of the connector profile. This name must be unique for each connector profile in the AWS account.
    connectorType String
    Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
    destinationConnectorProperties Property Map
    This stores the information that is required to query a particular connector. See Destination Connector Properties for more information.
    apiVersion String
    API version that the destination connector uses.
    connectorProfileName String
    Name of the connector profile. This name must be unique for each connector profile in the AWS account.
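
    As an illustration of how these fields fit together, here is a hedged TypeScript sketch of a single destinationFlowConfigs entry that targets Amazon EventBridge; the profile name, partner event source, and bucket names are placeholders, not values from this page:

    import * as aws from "@pulumi/aws";
    
    // One entry for the destinationFlowConfigs list. All identifiers below are
    // placeholders for your own connector profile and buckets.
    const eventBridgeDestination: aws.types.input.appflow.FlowDestinationFlowConfig = {
        connectorType: "EventBridge",
        connectorProfileName: "example-eventbridge-profile",
        destinationConnectorProperties: {
            eventBridge: {
                object: "example-partner-event-source",
                errorHandlingConfig: {
                    bucketName: "example-error-bucket",
                    bucketPrefix: "eventbridge-errors",
                    failOnFirstDestinationError: false,
                },
            },
        },
    };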

    FlowDestinationFlowConfigDestinationConnectorProperties, FlowDestinationFlowConfigDestinationConnectorPropertiesArgs

    CustomConnector FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnector
    Properties that are required to query the custom Connector. See Custom Connector Destination Properties for more details.
    CustomerProfiles FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfiles
    Properties that are required to query Amazon Connect Customer Profiles. See Customer Profiles Destination Properties for more details.
    EventBridge FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridge
    Properties that are required to query Amazon EventBridge. See Generic Destination Properties for more details.
    Honeycode FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycode
    Properties that are required to query Amazon Honeycode. See Generic Destination Properties for more details.
    LookoutMetrics FlowDestinationFlowConfigDestinationConnectorPropertiesLookoutMetrics
    Marketo FlowDestinationFlowConfigDestinationConnectorPropertiesMarketo
    Properties that are required to query Marketo. See Generic Destination Properties for more details.
    Redshift FlowDestinationFlowConfigDestinationConnectorPropertiesRedshift
    Properties that are required to query Amazon Redshift. See Redshift Destination Properties for more details.
    S3 FlowDestinationFlowConfigDestinationConnectorPropertiesS3
    Properties that are required to query Amazon S3. See S3 Destination Properties for more details.
    Salesforce FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforce
    Properties that are required to query Salesforce. See Salesforce Destination Properties for more details.
    SapoData FlowDestinationFlowConfigDestinationConnectorPropertiesSapoData
    Properties that are required to query SAPOData. See SAPOData Destination Properties for more details.
    Snowflake FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflake
    Properties that are required to query Snowflake. See Snowflake Destination Properties for more details.
    Upsolver FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolver
    Properties that are required to query Upsolver. See Upsolver Destination Properties for more details.
    Zendesk FlowDestinationFlowConfigDestinationConnectorPropertiesZendesk
    Properties that are required to query Zendesk. See Zendesk Destination Properties for more details.
    CustomConnector FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnector
    Properties that are required to query the custom Connector. See Custom Connector Destination Properties for more details.
    CustomerProfiles FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfiles
    Properties that are required to query Amazon Connect Customer Profiles. See Customer Profiles Destination Properties for more details.
    EventBridge FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridge
    Properties that are required to query Amazon EventBridge. See Generic Destination Properties for more details.
    Honeycode FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycode
    Properties that are required to query Amazon Honeycode. See Generic Destination Properties for more details.
    LookoutMetrics FlowDestinationFlowConfigDestinationConnectorPropertiesLookoutMetrics
    Marketo FlowDestinationFlowConfigDestinationConnectorPropertiesMarketo
    Properties that are required to query Marketo. See Generic Destination Properties for more details.
    Redshift FlowDestinationFlowConfigDestinationConnectorPropertiesRedshift
    Properties that are required to query Amazon Redshift. See Redshift Destination Properties for more details.
    S3 FlowDestinationFlowConfigDestinationConnectorPropertiesS3
    Properties that are required to query Amazon S3. See S3 Destination Properties for more details.
    Salesforce FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforce
    Properties that are required to query Salesforce. See Salesforce Destination Properties for more details.
    SapoData FlowDestinationFlowConfigDestinationConnectorPropertiesSapoData
    Properties that are required to query SAPOData. See SAPOData Destination Properties for more details.
    Snowflake FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflake
    Properties that are required to query Snowflake. See Snowflake Destination Properties for more details.
    Upsolver FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolver
    Properties that are required to query Upsolver. See Upsolver Destination Properties for more details.
    Zendesk FlowDestinationFlowConfigDestinationConnectorPropertiesZendesk
    Properties that are required to query Zendesk. See Zendesk Destination Properties for more details.
    customConnector FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnector
    Properties that are required to query the custom Connector. See Custom Connector Destination Properties for more details.
    customerProfiles FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfiles
    Properties that are required to query Amazon Connect Customer Profiles. See Customer Profiles Destination Properties for more details.
    eventBridge FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridge
    Properties that are required to query Amazon EventBridge. See Generic Destination Properties for more details.
    honeycode FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycode
    Properties that are required to query Amazon Honeycode. See Generic Destination Properties for more details.
    lookoutMetrics FlowDestinationFlowConfigDestinationConnectorPropertiesLookoutMetrics
    marketo FlowDestinationFlowConfigDestinationConnectorPropertiesMarketo
    Properties that are required to query Marketo. See Generic Destination Properties for more details.
    redshift FlowDestinationFlowConfigDestinationConnectorPropertiesRedshift
    Properties that are required to query Amazon Redshift. See Redshift Destination Properties for more details.
    s3 FlowDestinationFlowConfigDestinationConnectorPropertiesS3
    Properties that are required to query Amazon S3. See S3 Destination Properties for more details.
    salesforce FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforce
    Properties that are required to query Salesforce. See Salesforce Destination Properties for more details.
    sapoData FlowDestinationFlowConfigDestinationConnectorPropertiesSapoData
    Properties that are required to query SAPOData. See SAPOData Destination Properties for more details.
    snowflake FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflake
    Properties that are required to query Snowflake. See Snowflake Destination Properties for more details.
    upsolver FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolver
    Properties that are required to query Upsolver. See Upsolver Destination Properties for more details.
    zendesk FlowDestinationFlowConfigDestinationConnectorPropertiesZendesk
    Properties that are required to query Zendesk. See Zendesk Destination Properties for more details.
    customConnector FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnector
    Properties that are required to query the custom Connector. See Custom Connector Destination Properties for more details.
    customerProfiles FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfiles
    Properties that are required to query Amazon Connect Customer Profiles. See Customer Profiles Destination Properties for more details.
    eventBridge FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridge
    Properties that are required to query Amazon EventBridge. See Generic Destination Properties for more details.
    honeycode FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycode
    Properties that are required to query Amazon Honeycode. See Generic Destination Properties for more details.
    lookoutMetrics FlowDestinationFlowConfigDestinationConnectorPropertiesLookoutMetrics
    marketo FlowDestinationFlowConfigDestinationConnectorPropertiesMarketo
    Properties that are required to query Marketo. See Generic Destination Properties for more details.
    redshift FlowDestinationFlowConfigDestinationConnectorPropertiesRedshift
    Properties that are required to query Amazon Redshift. See Redshift Destination Properties for more details.
    s3 FlowDestinationFlowConfigDestinationConnectorPropertiesS3
    Properties that are required to query Amazon S3. See S3 Destination Properties for more details.
    salesforce FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforce
    Properties that are required to query Salesforce. See Salesforce Destination Properties for more details.
    sapoData FlowDestinationFlowConfigDestinationConnectorPropertiesSapoData
    Properties that are required to query SAPOData. See SAPOData Destination Properties for more details.
    snowflake FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflake
    Properties that are required to query Snowflake. See Snowflake Destination Properties for more details.
    upsolver FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolver
    Properties that are required to query Upsolver. See Upsolver Destination Properties for more details.
    zendesk FlowDestinationFlowConfigDestinationConnectorPropertiesZendesk
    Properties that are required to query Zendesk. See Zendesk Destination Properties for more details.
    custom_connector FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnector
    Properties that are required to query the custom Connector. See Custom Connector Destination Properties for more details.
    customer_profiles FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfiles
    Properties that are required to query Amazon Connect Customer Profiles. See Customer Profiles Destination Properties for more details.
    event_bridge FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridge
    Properties that are required to query Amazon EventBridge. See Generic Destination Properties for more details.
    honeycode FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycode
    Properties that are required to query Amazon Honeycode. See Generic Destination Properties for more details.
    lookout_metrics FlowDestinationFlowConfigDestinationConnectorPropertiesLookoutMetrics
    marketo FlowDestinationFlowConfigDestinationConnectorPropertiesMarketo
    Properties that are required to query Marketo. See Generic Destination Properties for more details.
    redshift FlowDestinationFlowConfigDestinationConnectorPropertiesRedshift
    Properties that are required to query Amazon Redshift. See Redshift Destination Properties for more details.
    s3 FlowDestinationFlowConfigDestinationConnectorPropertiesS3
    Properties that are required to query Amazon S3. See S3 Destination Properties for more details.
    salesforce FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforce
    Properties that are required to query Salesforce. See Salesforce Destination Properties for more details.
    sapo_data FlowDestinationFlowConfigDestinationConnectorPropertiesSapoData
    Properties that are required to query SAPOData. See SAPOData Destination Properties for more details.
    snowflake FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflake
    Properties that are required to query Snowflake. See Snowflake Destination Properties for more details.
    upsolver FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolver
    Properties that are required to query Upsolver. See Upsolver Destination Properties for more details.
    zendesk FlowDestinationFlowConfigDestinationConnectorPropertiesZendesk
    Properties that are required to query Zendesk. See Zendesk Destination Properties for more details.
    customConnector Property Map
    Properties that are required to query the custom Connector. See Custom Connector Destination Properties for more details.
    customerProfiles Property Map
    Properties that are required to query Amazon Connect Customer Profiles. See Customer Profiles Destination Properties for more details.
    eventBridge Property Map
    Properties that are required to query Amazon EventBridge. See Generic Destination Properties for more details.
    honeycode Property Map
    Properties that are required to query Amazon Honeycode. See Generic Destination Properties for more details.
    lookoutMetrics Property Map
    marketo Property Map
    Properties that are required to query Marketo. See Generic Destination Properties for more details.
    redshift Property Map
    Properties that are required to query Amazon Redshift. See Redshift Destination Properties for more details.
    s3 Property Map
    Properties that are required to query Amazon S3. See S3 Destination Properties for more details.
    salesforce Property Map
    Properties that are required to query Salesforce. See Salesforce Destination Properties for more details.
    sapoData Property Map
    Properties that are required to query SAPOData. See SAPOData Destination Properties for more details.
    snowflake Property Map
    Properties that are required to query Snowflake. See Snowflake Destination Properties for more details.
    upsolver Property Map
    Properties that are required to query Upsolver. See Upsolver Destination Properties for more details.
    zendesk Property Map
    Properties that are required to query Zendesk. See Zendesk Destination Properties for more details.

    FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnector, FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorArgs

    EntityName string
    Entity specified in the custom connector as a destination in the flow.
    CustomProperties Dictionary<string, string>
    Custom properties that are specific to the connector when it's used as a destination in the flow. Maximum of 50 items.
    ErrorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the custom connector as destination. See Error Handling Config for more details.
    IdFieldNames List<string>
    Name of the field that Amazon AppFlow uses as an ID when performing a write operation such as update, delete, or upsert.
    WriteOperationType string
    Type of write operation to be performed in the custom connector when it's used as destination. Valid values are INSERT, UPSERT, UPDATE, and DELETE.
    EntityName string
    Entity specified in the custom connector as a destination in the flow.
    CustomProperties map[string]string
    Custom properties that are specific to the connector when it's used as a destination in the flow. Maximum of 50 items.
    ErrorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the custom connector as destination. See Error Handling Config for more details.
    IdFieldNames []string
    Name of the field that Amazon AppFlow uses as an ID when performing a write operation such as update, delete, or upsert.
    WriteOperationType string
    Type of write operation to be performed in the custom connector when it's used as destination. Valid values are INSERT, UPSERT, UPDATE, and DELETE.
    entityName String
    Entity specified in the custom connector as a destination in the flow.
    customProperties Map<String,String>
    Custom properties that are specific to the connector when it's used as a destination in the flow. Maximum of 50 items.
    errorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the custom connector as destination. See Error Handling Config for more details.
    idFieldNames List<String>
    Name of the field that Amazon AppFlow uses as an ID when performing a write operation such as update, delete, or upsert.
    writeOperationType String
    Type of write operation to be performed in the custom connector when it's used as destination. Valid values are INSERT, UPSERT, UPDATE, and DELETE.
    entityName string
    Entity specified in the custom connector as a destination in the flow.
    customProperties {[key: string]: string}
    Custom properties that are specific to the connector when it's used as a destination in the flow. Maximum of 50 items.
    errorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the custom connector as destination. See Error Handling Config for more details.
    idFieldNames string[]
    Name of the field that Amazon AppFlow uses as an ID when performing a write operation such as update, delete, or upsert.
    writeOperationType string
    Type of write operation to be performed in the custom connector when it's used as destination. Valid values are INSERT, UPSERT, UPDATE, and DELETE.
    entity_name str
    Entity specified in the custom connector as a destination in the flow.
    custom_properties Mapping[str, str]
    Custom properties that are specific to the connector when it's used as a destination in the flow. Maximum of 50 items.
    error_handling_config FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the custom connector as destination. See Error Handling Config for more details.
    id_field_names Sequence[str]
    Name of the field that Amazon AppFlow uses as an ID when performing a write operation such as update, delete, or upsert.
    write_operation_type str
    Type of write operation to be performed in the custom connector when it's used as destination. Valid values are INSERT, UPSERT, UPDATE, and DELETE.
    entityName String
    Entity specified in the custom connector as a destination in the flow.
    customProperties Map<String>
    Custom properties that are specific to the connector when it's used as a destination in the flow. Maximum of 50 items.
    errorHandlingConfig Property Map
    Settings that determine how Amazon AppFlow handles an error when placing data in the custom connector as destination. See Error Handling Config for more details.
    idFieldNames List<String>
    Name of the field that Amazon AppFlow uses as an ID when performing a write operation such as update, delete, or upsert.
    writeOperationType String
    Type of write operation to be performed in the custom connector when it's used as destination. Valid values are INSERT, UPSERT, UPDATE, and DELETE.
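
    A hedged sketch of these properties on a CustomConnector destination; the entity name, profile name, custom properties, and bucket names are placeholders for your own connector setup:

    import * as aws from "@pulumi/aws";
    
    const customConnectorDestination: aws.types.input.appflow.FlowDestinationFlowConfig = {
        connectorType: "CustomConnector",
        connectorProfileName: "example-custom-profile",
        apiVersion: "v1",
        destinationConnectorProperties: {
            customConnector: {
                entityName: "Account",              // entity defined by the connector (placeholder)
                writeOperationType: "UPSERT",       // INSERT, UPSERT, UPDATE, or DELETE
                idFieldNames: ["id"],               // ID field used for UPSERT/UPDATE/DELETE
                customProperties: {
                    exampleSetting: "example-value", // connector-specific, placeholder
                },
                errorHandlingConfig: {
                    bucketName: "example-error-bucket",
                    bucketPrefix: "custom-connector-errors",
                    failOnFirstDestinationError: true,
                },
            },
        },
    };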

    FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorErrorHandlingConfigArgs

    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName string
    Name of the Amazon S3 bucket.
    bucketPrefix string
    Amazon S3 bucket prefix.
    failOnFirstDestinationError boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucket_name str
    Name of the Amazon S3 bucket.
    bucket_prefix str
    Amazon S3 bucket prefix.
    fail_on_first_destination_error bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.

    FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfiles, FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfilesArgs

    DomainName string
    Unique name of the Amazon Connect Customer Profiles domain.
    ObjectTypeName string
    Object specified in the Amazon Connect Customer Profiles flow destination.
    DomainName string
    Unique name of the Amazon Connect Customer Profiles domain.
    ObjectTypeName string
    Object specified in the Amazon Connect Customer Profiles flow destination.
    domainName String
    Unique name of the Amazon Connect Customer Profiles domain.
    objectTypeName String
    Object specified in the Amazon Connect Customer Profiles flow destination.
    domainName string
    Unique name of the Amazon Connect Customer Profiles domain.
    objectTypeName string
    Object specified in the Amazon Connect Customer Profiles flow destination.
    domain_name str
    Unique name of the Amazon Connect Customer Profiles domain.
    object_type_name str
    Object specified in the Amazon Connect Customer Profiles flow destination.
    domainName String
    Unique name of the Amazon Connect Customer Profiles domain.
    objectTypeName String
    Object specified in the Amazon Connect Customer Profiles flow destination.
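
    A hedged TypeScript sketch of these two properties; the domain and object type names are placeholders:

    import * as aws from "@pulumi/aws";
    
    const customerProfilesProperties: aws.types.input.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfiles = {
        domainName: "example-customer-profiles-domain",
        objectTypeName: "example-object-type",
    };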

    FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridge, FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeArgs

    Object string
    Object specified in the flow destination.
    ErrorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See Error Handling Config for more details.
    Object string
    Object specified in the flow destination.
    ErrorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See Error Handling Config for more details.
    object String
    Object specified in the flow destination.
    errorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See Error Handling Config for more details.
    object string
    Object specified in the flow destination.
    errorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See Error Handling Config for more details.
    object str
    Object specified in the flow destination.
    error_handling_config FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See Error Handling Config for more details.
    object String
    Object specified in the flow destination.
    errorHandlingConfig Property Map
    Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See Error Handling Config for more details.

    FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeErrorHandlingConfigArgs

    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName string
    Name of the Amazon S3 bucket.
    bucketPrefix string
    Amazon S3 bucket prefix.
    failOnFirstDestinationError boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucket_name str
    Name of the Amazon S3 bucket.
    bucket_prefix str
    Amazon S3 bucket prefix.
    fail_on_first_destination_error bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.

    FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycode, FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeArgs

    Object string
    Object specified in the flow destination.
    ErrorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See Error Handling Config for more details.
    Object string
    Object specified in the flow destination.
    ErrorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See Error Handling Config for more details.
    object String
    Object specified in the flow destination.
    errorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See Error Handling Config for more details.
    object string
    Object specified in the flow destination.
    errorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See Error Handling Config for more details.
    object str
    Object specified in the flow destination.
    error_handling_config FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See Error Handling Config for more details.
    object String
    Object specified in the flow destination.
    errorHandlingConfig Property Map
    Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See Error Handling Config for more details.

    FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeErrorHandlingConfigArgs

    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName string
    Name of the Amazon S3 bucket.
    bucketPrefix string
    Amazon S3 bucket prefix.
    failOnFirstDestinationError boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucket_name str
    Name of the Amazon S3 bucket.
    bucket_prefix str
    Amazon S3 bucket prefix.
    fail_on_first_destination_error bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.

    FlowDestinationFlowConfigDestinationConnectorPropertiesMarketo, FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoArgs

    Object string
    Object specified in the flow destination.
    ErrorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See Error Handling Config for more details.
    Object string
    Object specified in the flow destination.
    ErrorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See Error Handling Config for more details.
    object String
    Object specified in the flow destination.
    errorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See Error Handling Config for more details.
    object string
    Object specified in the flow destination.
    errorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See Error Handling Config for more details.
    object str
    Object specified in the flow destination.
    error_handling_config FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See Error Handling Config for more details.
    object String
    Object specified in the flow destination.
    errorHandlingConfig Property Map
    Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See Error Handling Config for more details.

    FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoErrorHandlingConfigArgs

    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName string
    Name of the Amazon S3 bucket.
    bucketPrefix string
    Amazon S3 bucket prefix.
    failOnFirstDestinationError boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucket_name str
    Name of the Amazon S3 bucket.
    bucket_prefix str
    Amazon S3 bucket prefix.
    fail_on_first_destination_error bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.

    FlowDestinationFlowConfigDestinationConnectorPropertiesRedshift, FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftArgs

    IntermediateBucketName string
    Intermediate bucket that Amazon AppFlow uses when moving data into Amazon Redshift.
    Object string
    Object specified in the flow destination.
    BucketPrefix string
    Amazon S3 bucket prefix.
    ErrorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See Error Handling Config for more details.
    IntermediateBucketName string
    Intermediate bucket that Amazon AppFlow uses when moving data into Amazon Redshift.
    Object string
    Object specified in the flow destination.
    BucketPrefix string
    Amazon S3 bucket prefix.
    ErrorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See Error Handling Config for more details.
    intermediateBucketName String
    Intermediate bucket that Amazon AppFlow uses when moving data into Amazon Redshift.
    object String
    Object specified in the flow destination.
    bucketPrefix String
    Amazon S3 bucket prefix.
    errorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See Error Handling Config for more details.
    intermediateBucketName string
    Intermediate bucket that Amazon AppFlow uses when moving data into Amazon Redshift.
    object string
    Object specified in the flow destination.
    bucketPrefix string
    Amazon S3 bucket prefix.
    errorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See Error Handling Config for more details.
    intermediate_bucket_name str
    Intermediate bucket that Amazon AppFlow uses when moving data into Amazon Redshift.
    object str
    Object specified in the flow destination.
    bucket_prefix str
    Amazon S3 bucket prefix.
    error_handling_config FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See Error Handling Config for more details.
    intermediateBucketName String
    Intermediate bucket that Amazon AppFlow uses when moving data into Amazon Redshift.
    object String
    Object specified in the flow destination.
    bucketPrefix String
    Amazon S3 bucket prefix.
    errorHandlingConfig Property Map
    Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See Error Handling Config for more details.
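
    A hedged TypeScript sketch of a Redshift destination entry; the table, profile, and bucket names are placeholders:

    import * as aws from "@pulumi/aws";
    
    const redshiftDestination: aws.types.input.appflow.FlowDestinationFlowConfig = {
        connectorType: "Redshift",
        connectorProfileName: "example-redshift-profile",
        destinationConnectorProperties: {
            redshift: {
                object: "example_schema.example_table",            // destination table (placeholder)
                intermediateBucketName: "example-staging-bucket",  // intermediate bucket used while loading
                bucketPrefix: "staging",
                errorHandlingConfig: {
                    bucketName: "example-error-bucket",
                    bucketPrefix: "redshift-errors",
                    failOnFirstDestinationError: true,
                },
            },
        },
    };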

    FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftErrorHandlingConfigArgs

    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName string
    Name of the Amazon S3 bucket.
    bucketPrefix string
    Amazon S3 bucket prefix.
    failOnFirstDestinationError boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucket_name str
    Name of the Amazon S3 bucket.
    bucket_prefix str
    Amazon S3 bucket prefix.
    fail_on_first_destination_error bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.

    FlowDestinationFlowConfigDestinationConnectorPropertiesS3, FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args

    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    S3OutputFormatConfig FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfig
    Configuration that determines how Amazon AppFlow should format the flow output data when Amazon S3 is used as the destination. See S3 Output Format Config for more details.
    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    S3OutputFormatConfig FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfig
    Configuration that determines how Amazon AppFlow should format the flow output data when Amazon S3 is used as the destination. See S3 Output Format Config for more details.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    s3OutputFormatConfig FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfig
    Configuration that determines how Amazon AppFlow should format the flow output data when Amazon S3 is used as the destination. See S3 Output Format Config for more details.
    bucketName string
    Name of the Amazon S3 bucket.
    bucketPrefix string
    Amazon S3 bucket prefix.
    s3OutputFormatConfig FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfig
    Configuration that determines how Amazon AppFlow should format the flow output data when Amazon S3 is used as the destination. See S3 Output Format Config for more details.
    bucket_name str
    Name of the Amazon S3 bucket.
    bucket_prefix str
    Amazon S3 bucket prefix.
    s3_output_format_config FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfig
    Configuration that determines how Amazon AppFlow should format the flow output data when Amazon S3 is used as the destination. See S3 Output Format Config for more details.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    s3OutputFormatConfig Property Map
    Configuration that determines how Amazon AppFlow should format the flow output data when Amazon S3 is used as the destination. See S3 Output Format Config for more details.

    FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs

    AggregationConfig FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfig
    Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
    FileType string
    File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV, JSON, and PARQUET.
    PrefixConfig FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfig
    Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
    PreserveSourceDataTyping bool
    Whether the data types from the source system need to be preserved (only valid for the PARQUET file type).
    AggregationConfig FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfig
    Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
    FileType string
    File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV, JSON, and PARQUET.
    PrefixConfig FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfig
    Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
    PreserveSourceDataTyping bool
    Whether the data types from the source system need to be preserved (only valid for the PARQUET file type).
    aggregationConfig FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfig
    Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
    fileType String
    File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV, JSON, and PARQUET.
    prefixConfig FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfig
    Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
    preserveSourceDataTyping Boolean
    Whether the data types from the source system need to be preserved (only valid for the PARQUET file type).
    aggregationConfig FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfig
    Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
    fileType string
    File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV, JSON, and PARQUET.
    prefixConfig FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfig
    Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
    preserveSourceDataTyping boolean
    Whether the data types from the source system need to be preserved (only valid for the PARQUET file type).
    aggregation_config FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfig
    Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
    file_type str
    File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV, JSON, and PARQUET.
    prefix_config FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfig
    Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
    preserve_source_data_typing bool
    Whether the data types from the source system need to be preserved (only valid for the PARQUET file type).
    aggregationConfig Property Map
    Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
    fileType String
    File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV, JSON, and PARQUET.
    prefixConfig Property Map
    Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
    preserveSourceDataTyping Boolean
    Whether the data types from the source system need to be preserved (only valid for the PARQUET file type).
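
    For illustration, the S3 Output Format Config settings above can be combined into a single object such as the following TypeScript sketch; the file size and prefix granularity are illustrative values, not recommendations.

    // Illustrative output formatting for an S3 destination: Parquet files with
    // source data types preserved, aggregated output, and day-level prefixes.
    const s3OutputFormatConfig = {
        fileType: "PARQUET",
        preserveSourceDataTyping: true,         // only meaningful for PARQUET output
        prefixConfig: {
            prefixType: "PATH_AND_FILENAME",
            prefixFormat: "DAY",
        },
        aggregationConfig: {
            aggregationType: "SingleFile",
            targetFileSize: 384,                // desired output file size in MB
        },
    };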

    FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfigArgs

    AggregationType string
    Whether Amazon AppFlow aggregates the flow records into a single file, or leaves them unaggregated. Valid values are None and SingleFile.
    TargetFileSize int
    The desired file size, in MB, for each output file that Amazon AppFlow writes to the flow destination. Integer value.
    AggregationType string
    Whether Amazon AppFlow aggregates the flow records into a single file, or leaves them unaggregated. Valid values are None and SingleFile.
    TargetFileSize int
    The desired file size, in MB, for each output file that Amazon AppFlow writes to the flow destination. Integer value.
    aggregationType String
    Whether Amazon AppFlow aggregates the flow records into a single file, or leaves them unaggregated. Valid values are None and SingleFile.
    targetFileSize Integer
    The desired file size, in MB, for each output file that Amazon AppFlow writes to the flow destination. Integer value.
    aggregationType string
    Whether Amazon AppFlow aggregates the flow records into a single file, or leaves them unaggregated. Valid values are None and SingleFile.
    targetFileSize number
    The desired file size, in MB, for each output file that Amazon AppFlow writes to the flow destination. Integer value.
    aggregation_type str
    Whether Amazon AppFlow aggregates the flow records into a single file, or leaves them unaggregated. Valid values are None and SingleFile.
    target_file_size int
    The desired file size, in MB, for each output file that Amazon AppFlow writes to the flow destination. Integer value.
    aggregationType String
    Whether Amazon AppFlow aggregates the flow records into a single file, or leaves them unaggregated. Valid values are None and SingleFile.
    targetFileSize Number
    The desired file size, in MB, for each output file that Amazon AppFlow writes to the flow destination. Integer value.

    FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs

    PrefixFormat string
    Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
    PrefixType string
    Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
    PrefixFormat string
    Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
    PrefixType string
    Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
    prefixFormat String
    Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
    prefixType String
    Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
    prefixFormat string
    Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
    prefixType string
    Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
    prefix_format str
    Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
    prefix_type str
    Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
    prefixFormat String
    Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
    prefixType String
    Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.

    FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforce, FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceArgs

    Object string
    Object specified in the flow destination.
    ErrorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the Salesforce destination. See Error Handling Config for more details.
    IdFieldNames List<string>
    Name of the field that Amazon AppFlow uses as an ID when performing a write operation such as update, delete, or upsert.
    WriteOperationType string
    Type of write operation to be performed in Salesforce when it's used as a destination. Valid values are INSERT, UPSERT, UPDATE, and DELETE.
    Object string
    Object specified in the flow destination.
    ErrorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the Salesforce destination. See Error Handling Config for more details.
    IdFieldNames []string
    Name of the field that Amazon AppFlow uses as an ID when performing a write operation such as update, delete, or upsert.
    WriteOperationType string
    Type of write operation to be performed in Salesforce when it's used as a destination. Valid values are INSERT, UPSERT, UPDATE, and DELETE.
    object String
    Object specified in the flow destination.
    errorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the Salesforce destination. See Error Handling Config for more details.
    idFieldNames List<String>
    Name of the field that Amazon AppFlow uses as an ID when performing a write operation such as update, delete, or upsert.
    writeOperationType String
    Type of write operation to be performed in Salesforce when it's used as a destination. Valid values are INSERT, UPSERT, UPDATE, and DELETE.
    object string
    Object specified in the flow destination.
    errorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the Salesforce destination. See Error Handling Config for more details.
    idFieldNames string[]
    Name of the field that Amazon AppFlow uses as an ID when performing a write operation such as update, delete, or upsert.
    writeOperationType string
    Type of write operation to be performed in Salesforce when it's used as a destination. Valid values are INSERT, UPSERT, UPDATE, and DELETE.
    object str
    Object specified in the flow destination.
    error_handling_config FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the Salesforce destination. See Error Handling Config for more details.
    id_field_names Sequence[str]
    Name of the field that Amazon AppFlow uses as an ID when performing a write operation such as update, delete, or upsert.
    write_operation_type str
    Type of write operation to be performed in Salesforce when it's used as a destination. Valid values are INSERT, UPSERT, UPDATE, and DELETE.
    object String
    Object specified in the flow destination.
    errorHandlingConfig Property Map
    Settings that determine how Amazon AppFlow handles an error when placing data in the Salesforce destination. See Error Handling Config for more details.
    idFieldNames List<String>
    Name of the field that Amazon AppFlow uses as an ID when performing a write operation such as update, delete, or upsert.
    writeOperationType String
    Type of write operation to be performed in Salesforce when it's used as a destination. Valid values are INSERT, UPSERT, UPDATE, and DELETE.
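
    A minimal TypeScript sketch of a Salesforce destination block follows; the Contact object and Email key field are hypothetical and would need to exist in the target Salesforce org.

    // Hypothetical Salesforce destination: upsert Contact records keyed on the
    // Email field, writing details of failed records to a placeholder bucket.
    const salesforceDestination = {
        object: "Contact",                      // hypothetical Salesforce object
        writeOperationType: "UPSERT",
        idFieldNames: ["Email"],                // hypothetical upsert key field
        errorHandlingConfig: {
            bucketName: "example-flow-errors",  // placeholder error bucket
            failOnFirstDestinationError: false,
        },
    };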

    FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceErrorHandlingConfigArgs

    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName string
    Name of the Amazon S3 bucket.
    bucketPrefix string
    Amazon S3 bucket prefix.
    failOnFirstDestinationError boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucket_name str
    Name of the Amazon S3 bucket.
    bucket_prefix str
    Amazon S3 bucket prefix.
    fail_on_first_destination_error bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.

    FlowDestinationFlowConfigDestinationConnectorPropertiesSapoData, FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataArgs

    ObjectPath string
    Object path specified in the SAPOData flow destination.
    ErrorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the SAPOData destination. See Error Handling Config for more details.
    IdFieldNames List<string>
    Name of the field that Amazon AppFlow uses as an ID when performing a write operation such as update, delete, or upsert.
    SuccessResponseHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfig
    Determines how Amazon AppFlow handles the success response that it gets from the connector after placing data. See Success Response Handling Config for more details.
    WriteOperationType string
    Type of write operation to be performed in SAPOData when it's used as a destination. Valid values are INSERT, UPSERT, UPDATE, and DELETE.
    ObjectPath string
    Object path specified in the SAPOData flow destination.
    ErrorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the SAPOData destination. See Error Handling Config for more details.
    IdFieldNames []string
    Name of the field that Amazon AppFlow uses as an ID when performing a write operation such as update, delete, or upsert.
    SuccessResponseHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfig
    Determines how Amazon AppFlow handles the success response that it gets from the connector after placing data. See Success Response Handling Config for more details.
    WriteOperationType string
    Type of write operation to be performed in SAPOData when it's used as a destination. Valid values are INSERT, UPSERT, UPDATE, and DELETE.
    objectPath String
    Object path specified in the SAPOData flow destination.
    errorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the SAPOData destination. See Error Handling Config for more details.
    idFieldNames List<String>
    Name of the field that Amazon AppFlow uses as an ID when performing a write operation such as update, delete, or upsert.
    successResponseHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfig
    Determines how Amazon AppFlow handles the success response that it gets from the connector after placing data. See Success Response Handling Config for more details.
    writeOperationType String
    Type of write operation to be performed in SAPOData when it's used as a destination. Valid values are INSERT, UPSERT, UPDATE, and DELETE.
    objectPath string
    Object path specified in the SAPOData flow destination.
    errorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the SAPOData destination. See Error Handling Config for more details.
    idFieldNames string[]
    Name of the field that Amazon AppFlow uses as an ID when performing a write operation such as update, delete, or upsert.
    successResponseHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfig
    Determines how Amazon AppFlow handles the success response that it gets from the connector after placing data. See Success Response Handling Config for more details.
    writeOperationType string
    Type of write operation to be performed in SAPOData when it's used as a destination. Valid values are INSERT, UPSERT, UPDATE, and DELETE.
    object_path str
    Object path specified in the SAPOData flow destination.
    error_handling_config FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the SAPOData destination. See Error Handling Config for more details.
    id_field_names Sequence[str]
    Name of the field that Amazon AppFlow uses as an ID when performing a write operation such as update, delete, or upsert.
    success_response_handling_config FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfig
    Determines how Amazon AppFlow handles the success response that it gets from the connector after placing data. See Success Response Handling Config for more details.
    write_operation_type str
    Type of write operation to be performed in SAPOData when it's used as a destination. Valid values are INSERT, UPSERT, UPDATE, and DELETE.
    objectPath String
    Object path specified in the SAPOData flow destination.
    errorHandlingConfig Property Map
    Settings that determine how Amazon AppFlow handles an error when placing data in the SAPOData destination. See Error Handling Config for more details.
    idFieldNames List<String>
    Name of the field that Amazon AppFlow uses as an ID when performing a write operation such as update, delete, or upsert.
    successResponseHandlingConfig Property Map
    Determines how Amazon AppFlow handles the success response that it gets from the connector after placing data. See Success Response Handling Config for more details.
    writeOperationType String
    Type of write operation to be performed in SAPOData when it's used as a destination. Valid values are INSERT, UPSERT, UPDATE, and DELETE.
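
    A comparable sketch for an SAPOData destination is shown below; the OData object path and bucket names are placeholders chosen for illustration.

    // Hypothetical SAPOData destination: insert records into an OData entity set
    // and record success and error output in placeholder S3 locations.
    const sapoDataDestination = {
        objectPath: "/sap/opu/odata/sap/EXAMPLE_SRV/Orders", // placeholder object path
        writeOperationType: "INSERT",
        successResponseHandlingConfig: {
            bucketName: "example-flow-responses",            // placeholder bucket
            bucketPrefix: "sapodata/success",
        },
        errorHandlingConfig: {
            bucketName: "example-flow-errors",               // placeholder bucket
            failOnFirstDestinationError: true,               // stop on the first failed record
        },
    };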

    FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfigArgs

    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName string
    Name of the Amazon S3 bucket.
    bucketPrefix string
    Amazon S3 bucket prefix.
    failOnFirstDestinationError boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucket_name str
    Name of the Amazon S3 bucket.
    bucket_prefix str
    Amazon S3 bucket prefix.
    fail_on_first_destination_error bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.

    FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfigArgs

    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    bucketName string
    Name of the Amazon S3 bucket.
    bucketPrefix string
    Amazon S3 bucket prefix.
    bucket_name str
    Name of the Amazon S3 bucket.
    bucket_prefix str
    Amazon S3 bucket prefix.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.

    FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflake, FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeArgs

    IntermediateBucketName string
    Intermediate bucket that Amazon AppFlow uses when moving data into Snowflake.
    Object string
    Object specified in the flow destination.
    BucketPrefix string
    Amazon S3 bucket prefix.
    ErrorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the Snowflake destination. See Error Handling Config for more details.
    IntermediateBucketName string
    Intermediate bucket that Amazon AppFlow uses when moving data into Snowflake.
    Object string
    Object specified in the flow destination.
    BucketPrefix string
    Amazon S3 bucket prefix.
    ErrorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the Snowflake destination. See Error Handling Config for more details.
    intermediateBucketName String
    Intermediate bucket that Amazon AppFlow uses when moving data into Snowflake.
    object String
    Object specified in the flow destination.
    bucketPrefix String
    Amazon S3 bucket prefix.
    errorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the Snowflake destination. See Error Handling Config for more details.
    intermediateBucketName string
    Intermediate bucket that Amazon AppFlow uses when moving data into Snowflake.
    object string
    Object specified in the flow destination.
    bucketPrefix string
    Amazon S3 bucket prefix.
    errorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the Snowflake destination. See Error Handling Config for more details.
    intermediate_bucket_name str
    Intermediate bucket that Amazon AppFlow uses when moving data into Snowflake.
    object str
    Object specified in the flow destination.
    bucket_prefix str
    Amazon S3 bucket prefix.
    error_handling_config FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the Snowflake destination. See Error Handling Config for more details.
    intermediateBucketName String
    Intermediate bucket that Amazon AppFlow uses when moving data into Snowflake.
    object String
    Object specified in the flow destination.
    bucketPrefix String
    Amazon S3 bucket prefix.
    errorHandlingConfig Property Map
    Settings that determine how Amazon AppFlow handles an error when placing data in the Snowflake destination. See Error Handling Config for more details.
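
    As a hedged sketch, a Snowflake destination block might look like the following; the table name and staging bucket are assumptions and must match a separately configured Snowflake connector profile.

    // Hypothetical Snowflake destination: AppFlow stages records in an
    // intermediate S3 bucket before loading them into the named table.
    const snowflakeDestination = {
        object: "ANALYTICS.PUBLIC.CONTACTS",                // hypothetical Snowflake table
        intermediateBucketName: "example-appflow-staging",  // placeholder staging bucket
        bucketPrefix: "snowflake",
        errorHandlingConfig: {
            bucketName: "example-flow-errors",              // placeholder error bucket
            failOnFirstDestinationError: false,
        },
    };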

    FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeErrorHandlingConfigArgs

    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName string
    Name of the Amazon S3 bucket.
    bucketPrefix string
    Amazon S3 bucket prefix.
    failOnFirstDestinationError boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucket_name str
    Name of the Amazon S3 bucket.
    bucket_prefix str
    Amazon S3 bucket prefix.
    fail_on_first_destination_error bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.

    FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolver, FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverArgs

    BucketName string
    Name of the Amazon S3 bucket.
    S3OutputFormatConfig FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfig
    Configuration that determines how Amazon AppFlow should format the flow output data when Amazon S3 is used as the destination. See S3 Output Format Config for more details.
    BucketPrefix string
    Amazon S3 bucket prefix.
    BucketName string
    Name of the Amazon S3 bucket.
    S3OutputFormatConfig FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfig
    Configuration that determines how Amazon AppFlow should format the flow output data when Amazon S3 is used as the destination. See S3 Output Format Config for more details.
    BucketPrefix string
    Amazon S3 bucket prefix.
    bucketName String
    Name of the Amazon S3 bucket.
    s3OutputFormatConfig FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfig
    Configuration that determines how Amazon AppFlow should format the flow output data when Amazon S3 is used as the destination. See S3 Output Format Config for more details.
    bucketPrefix String
    Amazon S3 bucket prefix.
    bucketName string
    Name of the Amazon S3 bucket.
    s3OutputFormatConfig FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfig
    Configuration that determines how Amazon AppFlow should format the flow output data when Amazon S3 is used as the destination. See S3 Output Format Config for more details.
    bucketPrefix string
    Amazon S3 bucket prefix.
    bucket_name str
    Name of the Amazon S3 bucket.
    s3_output_format_config FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfig
    Configuration that determines how Amazon AppFlow should format the flow output data when Amazon S3 is used as the destination. See S3 Output Format Config for more details.
    bucket_prefix str
    Amazon S3 bucket prefix.
    bucketName String
    Name of the Amazon S3 bucket.
    s3OutputFormatConfig Property Map
    Configuration that determines how Amazon AppFlow should format the flow output data when Amazon S3 is used as the destination. See S3 Output Format Config for more details.
    bucketPrefix String
    Amazon S3 bucket prefix.

    FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigArgs

    PrefixConfig FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfig
    Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
    AggregationConfig FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfig
    Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
    FileType string
    File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV, JSON, and PARQUET.
    PrefixConfig FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfig
    Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
    AggregationConfig FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfig
    Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
    FileType string
    File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV, JSON, and PARQUET.
    prefixConfig FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfig
    Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
    aggregationConfig FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfig
    Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
    fileType String
    File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV, JSON, and PARQUET.
    prefixConfig FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfig
    Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
    aggregationConfig FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfig
    Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
    fileType string
    File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV, JSON, and PARQUET.
    prefix_config FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfig
    Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
    aggregation_config FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfig
    Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
    file_type str
    File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV, JSON, and PARQUET.
    prefixConfig Property Map
    Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
    aggregationConfig Property Map
    Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
    fileType String
    File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV, JSON, and PARQUET.

    FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfigArgs

    AggregationType string
    Whether Amazon AppFlow aggregates the flow records into a single file, or leaves them unaggregated. Valid values are None and SingleFile.
    AggregationType string
    Whether Amazon AppFlow aggregates the flow records into a single file, or leaves them unaggregated. Valid values are None and SingleFile.
    aggregationType String
    Whether Amazon AppFlow aggregates the flow records into a single file, or leaves them unaggregated. Valid values are None and SingleFile.
    aggregationType string
    Whether Amazon AppFlow aggregates the flow records into a single file, or leaves them unaggregated. Valid values are None and SingleFile.
    aggregation_type str
    Whether Amazon AppFlow aggregates the flow records into a single file, or leaves them unaggregated. Valid values are None and SingleFile.
    aggregationType String
    Whether Amazon AppFlow aggregates the flow records into a single file, or leaves them unaggregated. Valid values are None and SingleFile.

    FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfigArgs

    PrefixType string
    Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
    PrefixFormat string
    Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
    PrefixType string
    Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
    PrefixFormat string
    Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
    prefixType String
    Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
    prefixFormat String
    Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
    prefixType string
    Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
    prefixFormat string
    Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
    prefix_type str
    Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
    prefix_format str
    Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
    prefixType String
    Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
    prefixFormat String
    Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.

    FlowDestinationFlowConfigDestinationConnectorPropertiesZendesk, FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskArgs

    Object string
    Object specified in the flow destination.
    ErrorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the Zendesk destination. See Error Handling Config for more details.
    IdFieldNames List<string>
    Name of the field that Amazon AppFlow uses as an ID when performing a write operation such as update, delete, or upsert.
    WriteOperationType string
    Type of write operation to be performed in Zendesk when it's used as a destination. Valid values are INSERT, UPSERT, UPDATE, and DELETE.
    Object string
    Object specified in the flow destination.
    ErrorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the Zendesk destination. See Error Handling Config for more details.
    IdFieldNames []string
    Name of the field that Amazon AppFlow uses as an ID when performing a write operation such as update, delete, or upsert.
    WriteOperationType string
    Type of write operation to be performed in Zendesk when it's used as a destination. Valid values are INSERT, UPSERT, UPDATE, and DELETE.
    object String
    Object specified in the flow destination.
    errorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the Zendesk destination. See Error Handling Config for more details.
    idFieldNames List<String>
    Name of the field that Amazon AppFlow uses as an ID when performing a write operation such as update, delete, or upsert.
    writeOperationType String
    Type of write operation to be performed in Zendesk when it's used as a destination. Valid values are INSERT, UPSERT, UPDATE, and DELETE.
    object string
    Object specified in the flow destination.
    errorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the Zendesk destination. See Error Handling Config for more details.
    idFieldNames string[]
    Name of the field that Amazon AppFlow uses as an ID when performing a write operation such as update, delete, or upsert.
    writeOperationType string
    Type of write operation to be performed in Zendesk when it's used as a destination. Valid values are INSERT, UPSERT, UPDATE, and DELETE.
    object str
    Object specified in the flow destination.
    error_handling_config FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskErrorHandlingConfig
    Settings that determine how Amazon AppFlow handles an error when placing data in the Zendesk destination. See Error Handling Config for more details.
    id_field_names Sequence[str]
    Name of the field that Amazon AppFlow uses as an ID when performing a write operation such as update, delete, or upsert.
    write_operation_type str
    Type of write operation to be performed in Zendesk when it's used as a destination. Valid values are INSERT, UPSERT, UPDATE, and DELETE.
    object String
    Object specified in the flow destination.
    errorHandlingConfig Property Map
    Settings that determine how Amazon AppFlow handles an error when placing data in the Zendesk destination. See Error Handling Config for more details.
    idFieldNames List<String>
    Name of the field that Amazon AppFlow uses as an ID when performing a write operation such as update, delete, or upsert.
    writeOperationType String
    Type of write operation to be performed in Zendesk when it's used as a destination. Valid values are INSERT, UPSERT, UPDATE, and DELETE.

    FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskErrorHandlingConfigArgs

    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    FailOnFirstDestinationError bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName string
    Name of the Amazon S3 bucket.
    bucketPrefix string
    Amazon S3 bucket prefix.
    failOnFirstDestinationError boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucket_name str
    Name of the Amazon S3 bucket.
    bucket_prefix str
    Amazon S3 bucket prefix.
    fail_on_first_destination_error bool
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    failOnFirstDestinationError Boolean
    If the flow should fail after the first instance of a failure when attempting to place data in the destination.

    FlowSourceFlowConfig, FlowSourceFlowConfigArgs

    ConnectorType string
    Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
    SourceConnectorProperties FlowSourceFlowConfigSourceConnectorProperties
    Information that is required to query a particular source connector. See Source Connector Properties for details.
    ApiVersion string
    API version that the connector uses.
    ConnectorProfileName string
    Name of the connector profile. This name must be unique for each connector profile in the AWS account.
    IncrementalPullConfig FlowSourceFlowConfigIncrementalPullConfig
    Defines the configuration for a scheduled incremental data pull. If a valid configuration is provided, the fields specified in the configuration are used when querying for the incremental data pull. See Incremental Pull Config for more details.
    ConnectorType string
    Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
    SourceConnectorProperties FlowSourceFlowConfigSourceConnectorProperties
    Information that is required to query a particular source connector. See Source Connector Properties for details.
    ApiVersion string
    API version that the connector uses.
    ConnectorProfileName string
    Name of the connector profile. This name must be unique for each connector profile in the AWS account.
    IncrementalPullConfig FlowSourceFlowConfigIncrementalPullConfig
    Defines the configuration for a scheduled incremental data pull. If a valid configuration is provided, the fields specified in the configuration are used when querying for the incremental data pull. See Incremental Pull Config for more details.
    connectorType String
    Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
    sourceConnectorProperties FlowSourceFlowConfigSourceConnectorProperties
    Information that is required to query a particular source connector. See Source Connector Properties for details.
    apiVersion String
    API version that the connector uses.
    connectorProfileName String
    Name of the connector profile. This name must be unique for each connector profile in the AWS account.
    incrementalPullConfig FlowSourceFlowConfigIncrementalPullConfig
    Defines the configuration for a scheduled incremental data pull. If a valid configuration is provided, the fields specified in the configuration are used when querying for the incremental data pull. See Incremental Pull Config for more details.
    connectorType string
    Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
    sourceConnectorProperties FlowSourceFlowConfigSourceConnectorProperties
    Information that is required to query a particular source connector. See Source Connector Properties for details.
    apiVersion string
    API version that the connector uses.
    connectorProfileName string
    Name of the connector profile. This name must be unique for each connector profile in the AWS account.
    incrementalPullConfig FlowSourceFlowConfigIncrementalPullConfig
    Defines the configuration for a scheduled incremental data pull. If a valid configuration is provided, the fields specified in the configuration are used when querying for the incremental data pull. See Incremental Pull Config for more details.
    connector_type str
    Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
    source_connector_properties FlowSourceFlowConfigSourceConnectorProperties
    Information that is required to query a particular source connector. See Source Connector Properties for details.
    api_version str
    API version that the connector uses.
    connector_profile_name str
    Name of the connector profile. This name must be unique for each connector profile in the AWS account.
    incremental_pull_config FlowSourceFlowConfigIncrementalPullConfig
    Defines the configuration for a scheduled incremental data pull. If a valid configuration is provided, the fields specified in the configuration are used when querying for the incremental data pull. See Incremental Pull Config for more details.
    connectorType String
    Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
    sourceConnectorProperties Property Map
    Information that is required to query a particular source connector. See Source Connector Properties for details.
    apiVersion String
    API version that the connector uses.
    connectorProfileName String
    Name of the connector profile. This name must be unique for each connector profile in the AWS account.
    incrementalPullConfig Property Map
    Defines the configuration for a scheduled incremental data pull. If a valid configuration is provided, the fields specified in the configuration are used when querying for the incremental data pull. See Incremental Pull Config for more details.
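
    Pulling these source-side settings together, the following TypeScript sketch shows one possible sourceFlowConfig for an incremental Salesforce pull; the connector profile name, API version, object, and timestamp field are all assumptions rather than values from the example flow.

    // Hypothetical source configuration: read the Salesforce Account object
    // through an existing connector profile, importing only records whose
    // LastModifiedDate is newer than the previous flow run.
    const sourceFlowConfig = {
        connectorType: "Salesforce",
        connectorProfileName: "example-salesforce-profile", // assumed existing profile
        apiVersion: "58.0",                                  // assumed Salesforce API version
        incrementalPullConfig: {
            datetimeTypeFieldName: "LastModifiedDate",       // assumed timestamp field
        },
        sourceConnectorProperties: {
            salesforce: {
                object: "Account",                           // assumed Salesforce object (see Salesforce Source Properties)
            },
        },
    };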

    FlowSourceFlowConfigIncrementalPullConfig, FlowSourceFlowConfigIncrementalPullConfigArgs

    DatetimeTypeFieldName string
    Field that specifies the date-time or timestamp field to use as the criterion when importing incremental records from the source.
    DatetimeTypeFieldName string
    Field that specifies the date-time or timestamp field to use as the criterion when importing incremental records from the source.
    datetimeTypeFieldName String
    Field that specifies the date-time or timestamp field to use as the criterion when importing incremental records from the source.
    datetimeTypeFieldName string
    Field that specifies the date-time or timestamp field to use as the criterion when importing incremental records from the source.
    datetime_type_field_name str
    Field that specifies the date-time or timestamp field to use as the criterion when importing incremental records from the source.
    datetimeTypeFieldName String
    Field that specifies the date-time or timestamp field to use as the criterion when importing incremental records from the source.

    FlowSourceFlowConfigSourceConnectorProperties, FlowSourceFlowConfigSourceConnectorPropertiesArgs

    Amplitude FlowSourceFlowConfigSourceConnectorPropertiesAmplitude
    Information that is required for querying Amplitude. See Generic Source Properties for more details.
    CustomConnector FlowSourceFlowConfigSourceConnectorPropertiesCustomConnector
    Properties that are applied when the custom connector is being used as a source. See Custom Connector Source Properties.
    Datadog FlowSourceFlowConfigSourceConnectorPropertiesDatadog
    Information that is required for querying Datadog. See Generic Source Properties for more details.
    Dynatrace FlowSourceFlowConfigSourceConnectorPropertiesDynatrace
    Information that is required for querying Dynatrace. See Generic Source Properties for more details.
    GoogleAnalytics FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalytics
    Information that is required for querying Google Analytics. See Generic Source Properties for more details.
    InforNexus FlowSourceFlowConfigSourceConnectorPropertiesInforNexus
    Information that is required for querying Infor Nexus. See Generic Source Properties for more details.
    Marketo FlowSourceFlowConfigSourceConnectorPropertiesMarketo
    Information that is required for querying Marketo. See Generic Source Properties for more details.
    S3 FlowSourceFlowConfigSourceConnectorPropertiesS3
    Information that is required for querying Amazon S3. See S3 Source Properties for more details.
    Salesforce FlowSourceFlowConfigSourceConnectorPropertiesSalesforce
    Information that is required for querying Salesforce. See Salesforce Source Properties for more details.
    SapoData FlowSourceFlowConfigSourceConnectorPropertiesSapoData
    Information that is required for querying SAPOData as a flow source. See SAPO Source Properties for more details.
    ServiceNow FlowSourceFlowConfigSourceConnectorPropertiesServiceNow
    Information that is required for querying ServiceNow. See Generic Source Properties for more details.
    Singular FlowSourceFlowConfigSourceConnectorPropertiesSingular
    Information that is required for querying Singular. See Generic Source Properties for more details.
    Slack FlowSourceFlowConfigSourceConnectorPropertiesSlack
    Information that is required for querying Slack. See Generic Source Properties for more details.
    Trendmicro FlowSourceFlowConfigSourceConnectorPropertiesTrendmicro
    Information that is required for querying Trend Micro. See Generic Source Properties for more details.
    Veeva FlowSourceFlowConfigSourceConnectorPropertiesVeeva
    Information that is required for querying Veeva. See Veeva Source Properties for more details.
    Zendesk FlowSourceFlowConfigSourceConnectorPropertiesZendesk
    Information that is required for querying Zendesk. See Generic Source Properties for more details.
    Amplitude FlowSourceFlowConfigSourceConnectorPropertiesAmplitude
    Information that is required for querying Amplitude. See Generic Source Properties for more details.
    CustomConnector FlowSourceFlowConfigSourceConnectorPropertiesCustomConnector
    Properties that are applied when the custom connector is being used as a source. See Custom Connector Source Properties.
    Datadog FlowSourceFlowConfigSourceConnectorPropertiesDatadog
    Information that is required for querying Datadog. See Generic Source Properties for more details.
    Dynatrace FlowSourceFlowConfigSourceConnectorPropertiesDynatrace
    Information that is required for querying Dynatrace. See Generic Source Properties for more details.
    GoogleAnalytics FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalytics
    Information that is required for querying Google Analytics. See Generic Source Properties for more details.
    InforNexus FlowSourceFlowConfigSourceConnectorPropertiesInforNexus
    Information that is required for querying Infor Nexus. See Generic Source Properties for more details.
    Marketo FlowSourceFlowConfigSourceConnectorPropertiesMarketo
    Information that is required for querying Marketo. See Generic Source Properties for more details.
    S3 FlowSourceFlowConfigSourceConnectorPropertiesS3
    Information that is required for querying Amazon S3. See S3 Source Properties for more details.
    Salesforce FlowSourceFlowConfigSourceConnectorPropertiesSalesforce
    Information that is required for querying Salesforce. See Salesforce Source Properties for more details.
    SapoData FlowSourceFlowConfigSourceConnectorPropertiesSapoData
    Information that is required for querying SAPOData as a flow source. See SAPO Source Properties for more details.
    ServiceNow FlowSourceFlowConfigSourceConnectorPropertiesServiceNow
    Information that is required for querying ServiceNow. See Generic Source Properties for more details.
    Singular FlowSourceFlowConfigSourceConnectorPropertiesSingular
    Information that is required for querying Singular. See Generic Source Properties for more details.
    Slack FlowSourceFlowConfigSourceConnectorPropertiesSlack
    Information that is required for querying Slack. See Generic Source Properties for more details.
    Trendmicro FlowSourceFlowConfigSourceConnectorPropertiesTrendmicro
    Information that is required for querying Trend Micro. See Generic Source Properties for more details.
    Veeva FlowSourceFlowConfigSourceConnectorPropertiesVeeva
    Information that is required for querying Veeva. See Veeva Source Properties for more details.
    Zendesk FlowSourceFlowConfigSourceConnectorPropertiesZendesk
    Information that is required for querying Zendesk. See Generic Source Properties for more details.
    amplitude FlowSourceFlowConfigSourceConnectorPropertiesAmplitude
    Information that is required for querying Amplitude. See Generic Source Properties for more details.
    customConnector FlowSourceFlowConfigSourceConnectorPropertiesCustomConnector
    Properties that are applied when the custom connector is being used as a source. See Custom Connector Source Properties.
    datadog FlowSourceFlowConfigSourceConnectorPropertiesDatadog
    Information that is required for querying Datadog. See Generic Source Properties for more details.
    dynatrace FlowSourceFlowConfigSourceConnectorPropertiesDynatrace
    Information that is required for querying Dynatrace. See Generic Source Properties for more details.
    googleAnalytics FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalytics
    Information that is required for querying Google Analytics. See Generic Source Properties for more details.
    inforNexus FlowSourceFlowConfigSourceConnectorPropertiesInforNexus
    Information that is required for querying Infor Nexus. See Generic Source Properties for more details.
    marketo FlowSourceFlowConfigSourceConnectorPropertiesMarketo
    Information that is required for querying Marketo. See Generic Source Properties for more details.
    s3 FlowSourceFlowConfigSourceConnectorPropertiesS3
    Information that is required for querying Amazon S3. See S3 Source Properties for more details.
    salesforce FlowSourceFlowConfigSourceConnectorPropertiesSalesforce
    Information that is required for querying Salesforce. See Salesforce Source Properties for more details.
    sapoData FlowSourceFlowConfigSourceConnectorPropertiesSapoData
    Information that is required for querying SAPOData as a flow source. See SAPO Source Properties for more details.
    serviceNow FlowSourceFlowConfigSourceConnectorPropertiesServiceNow
    Information that is required for querying ServiceNow. See Generic Source Properties for more details.
    singular FlowSourceFlowConfigSourceConnectorPropertiesSingular
    Information that is required for querying Singular. See Generic Source Properties for more details.
    slack FlowSourceFlowConfigSourceConnectorPropertiesSlack
    Information that is required for querying Slack. See Generic Source Properties for more details.
    trendmicro FlowSourceFlowConfigSourceConnectorPropertiesTrendmicro
    Information that is required for querying Trend Micro. See Generic Source Properties for more details.
    veeva FlowSourceFlowConfigSourceConnectorPropertiesVeeva
    Information that is required for querying Veeva. See Veeva Source Properties for more details.
    zendesk FlowSourceFlowConfigSourceConnectorPropertiesZendesk
    Information that is required for querying Zendesk. See Generic Source Properties for more details.
    amplitude FlowSourceFlowConfigSourceConnectorPropertiesAmplitude
    Information that is required for querying Amplitude. See Generic Source Properties for more details.
    customConnector FlowSourceFlowConfigSourceConnectorPropertiesCustomConnector
    Properties that are applied when the custom connector is being used as a source. See Custom Connector Source Properties.
    datadog FlowSourceFlowConfigSourceConnectorPropertiesDatadog
    Information that is required for querying Datadog. See Generic Source Properties for more details.
    dynatrace FlowSourceFlowConfigSourceConnectorPropertiesDynatrace
    Information that is required for querying Dynatrace. See Generic Source Properties for more details.
    googleAnalytics FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalytics
    Information that is required for querying Google Analytics. See Generic Source Properties for more details.
    inforNexus FlowSourceFlowConfigSourceConnectorPropertiesInforNexus
    Information that is required for querying Infor Nexus. See Generic Source Properties for more details.
    marketo FlowSourceFlowConfigSourceConnectorPropertiesMarketo
    Information that is required for querying Marketo. See Generic Source Properties for more details.
    s3 FlowSourceFlowConfigSourceConnectorPropertiesS3
    Information that is required for querying Amazon S3. See S3 Source Properties for more details.
    salesforce FlowSourceFlowConfigSourceConnectorPropertiesSalesforce
    Information that is required for querying Salesforce. See Salesforce Source Properties for more details.
    sapoData FlowSourceFlowConfigSourceConnectorPropertiesSapoData
    Information that is required for querying SAPOData as a flow source. See SAPO Source Properties for more details.
    serviceNow FlowSourceFlowConfigSourceConnectorPropertiesServiceNow
    Information that is required for querying ServiceNow. See Generic Source Properties for more details.
    singular FlowSourceFlowConfigSourceConnectorPropertiesSingular
    Information that is required for querying Singular. See Generic Source Properties for more details.
    slack FlowSourceFlowConfigSourceConnectorPropertiesSlack
    Information that is required for querying Slack. See Generic Source Properties for more details.
    trendmicro FlowSourceFlowConfigSourceConnectorPropertiesTrendmicro
    Information that is required for querying Trend Micro. See Generic Source Properties for more details.
    veeva FlowSourceFlowConfigSourceConnectorPropertiesVeeva
    Information that is required for querying Veeva. See Veeva Source Properties for more details.
    zendesk FlowSourceFlowConfigSourceConnectorPropertiesZendesk
    Information that is required for querying Zendesk. See Generic Source Properties for more details.
    amplitude FlowSourceFlowConfigSourceConnectorPropertiesAmplitude
    Information that is required for querying Amplitude. See Generic Source Properties for more details.
    custom_connector FlowSourceFlowConfigSourceConnectorPropertiesCustomConnector
    Properties that are applied when the custom connector is being used as a source. See Custom Connector Source Properties.
    datadog FlowSourceFlowConfigSourceConnectorPropertiesDatadog
    Information that is required for querying Datadog. See Generic Source Properties for more details.
    dynatrace FlowSourceFlowConfigSourceConnectorPropertiesDynatrace
    Information that is required for querying Dynatrace. See Generic Source Properties for more details.
    google_analytics FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalytics
    Information that is required for querying Google Analytics. See Generic Source Properties for more details.
    infor_nexus FlowSourceFlowConfigSourceConnectorPropertiesInforNexus
    Information that is required for querying Infor Nexus. See Generic Source Properties for more details.
    marketo FlowSourceFlowConfigSourceConnectorPropertiesMarketo
    Information that is required for querying Marketo. See Generic Source Properties for more details.
    s3 FlowSourceFlowConfigSourceConnectorPropertiesS3
    Information that is required for querying Amazon S3. See S3 Source Properties for more details.
    salesforce FlowSourceFlowConfigSourceConnectorPropertiesSalesforce
    Information that is required for querying Salesforce. See Salesforce Source Properties for more details.
    sapo_data FlowSourceFlowConfigSourceConnectorPropertiesSapoData
    Information that is required for querying SAPOData as a flow source. See SAPO Source Properties for more details.
    service_now FlowSourceFlowConfigSourceConnectorPropertiesServiceNow
    Information that is required for querying ServiceNow. See Generic Source Properties for more details.
    singular FlowSourceFlowConfigSourceConnectorPropertiesSingular
    Information that is required for querying Singular. See Generic Source Properties for more details.
    slack FlowSourceFlowConfigSourceConnectorPropertiesSlack
    Information that is required for querying Slack. See Generic Source Properties for more details.
    trendmicro FlowSourceFlowConfigSourceConnectorPropertiesTrendmicro
    Information that is required for querying Trend Micro. See Generic Source Properties for more details.
    veeva FlowSourceFlowConfigSourceConnectorPropertiesVeeva
    Information that is required for querying Veeva. See Veeva Source Properties for more details.
    zendesk FlowSourceFlowConfigSourceConnectorPropertiesZendesk
    Information that is required for querying Zendesk. See Generic Source Properties for more details.
    amplitude Property Map
    Information that is required for querying Amplitude. See Generic Source Properties for more details.
    customConnector Property Map
    Properties that are applied when the custom connector is being used as a source. See Custom Connector Source Properties.
    datadog Property Map
    Information that is required for querying Datadog. See Generic Source Properties for more details.
    dynatrace Property Map
    Information that is required for querying Dynatrace. See Generic Source Properties for more details.
    googleAnalytics Property Map
    Information that is required for querying Google Analytics. See Generic Source Properties for more details.
    inforNexus Property Map
    Information that is required for querying Infor Nexus. See Generic Source Properties for more details.
    marketo Property Map
    Information that is required for querying Marketo. See Generic Source Properties for more details.
    s3 Property Map
    Information that is required for querying Amazon S3. See S3 Source Properties for more details.
    salesforce Property Map
    Information that is required for querying Salesforce. See Salesforce Source Properties for more details.
    sapoData Property Map
    Information that is required for querying SAPOData as a flow source. See SAPO Source Properties for more details.
    serviceNow Property Map
    Information that is required for querying ServiceNow. See Generic Source Properties for more details.
    singular Property Map
    Information that is required for querying Singular. See Generic Source Properties for more details.
    slack Property Map
    Information that is required for querying Slack. See Generic Source Properties for more details.
    trendmicro Property Map
    Information that is required for querying Trend Micro. See Generic Source Properties for more details.
    veeva Property Map
    Information that is required for querying Veeva. See Veeva Source Properties for more details.
    zendesk Property Map
    Information that is required for querying Zendesk. See Generic Source Properties for more details.
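
    The blocks above are mutually exclusive: set exactly one connector-specific block under sourceConnectorProperties, and it must match the connectorType of the enclosing source flow config. A minimal TypeScript sketch using a Marketo source follows; the connector profile name and object are hypothetical placeholders.

    import * as aws from "@pulumi/aws";

    // Sketch only: one connector-specific block, matching connectorType.
    // "example-marketo-profile" and "Lead" are hypothetical placeholders.
    const marketoSource: aws.types.input.appflow.FlowSourceFlowConfig = {
        connectorType: "Marketo",
        connectorProfileName: "example-marketo-profile",
        sourceConnectorProperties: {
            marketo: {
                object: "Lead",
            },
        },
    };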

    FlowSourceFlowConfigSourceConnectorPropertiesAmplitude, FlowSourceFlowConfigSourceConnectorPropertiesAmplitudeArgs

    Object string
    Object specified in the flow source.
    Object string
    Object specified in the flow source.
    object String
    Object specified in the flow source.
    object string
    Object specified in the flow source.
    object str
    Object specified in the flow source.
    object String
    Object specified in the flow source.

    FlowSourceFlowConfigSourceConnectorPropertiesCustomConnector, FlowSourceFlowConfigSourceConnectorPropertiesCustomConnectorArgs

    EntityName string
    Entity specified in the custom connector as a source in the flow.
    CustomProperties Dictionary<string, string>
    Custom properties that are specific to the connector when it's used as a source in the flow. Maximum of 50 items.
    EntityName string
    Entity specified in the custom connector as a source in the flow.
    CustomProperties map[string]string
    Custom properties that are specific to the connector when it's used as a source in the flow. Maximum of 50 items.
    entityName String
    Entity specified in the custom connector as a source in the flow.
    customProperties Map<String,String>
    Custom properties that are specific to the connector when it's used as a source in the flow. Maximum of 50 items.
    entityName string
    Entity specified in the custom connector as a source in the flow.
    customProperties {[key: string]: string}
    Custom properties that are specific to the connector when it's used as a source in the flow. Maximum of 50 items.
    entity_name str
    Entity specified in the custom connector as a source in the flow.
    custom_properties Mapping[str, str]
    Custom properties that are specific to the connector when it's used as a source in the flow. Maximum of 50 items.
    entityName String
    Entity specified in the custom connector as a source in the flow.
    customProperties Map<String>
    Custom properties that are specific to the connector when it's used as a source in the flow. Maximum of 50 items.
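
    A TypeScript sketch of a custom connector used as a flow source follows; the profile name, entity name, and custom property key/value are hypothetical placeholders.

    import * as aws from "@pulumi/aws";

    // Sketch only: custom connector as the flow source. The profile name,
    // entity name, and custom property key/value are hypothetical placeholders.
    const customSource: aws.types.input.appflow.FlowSourceFlowConfig = {
        connectorType: "CustomConnector",
        connectorProfileName: "example-custom-profile",
        sourceConnectorProperties: {
            customConnector: {
                entityName: "orders",
                customProperties: {
                    pageSize: "100",
                },
            },
        },
    };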

    FlowSourceFlowConfigSourceConnectorPropertiesDatadog, FlowSourceFlowConfigSourceConnectorPropertiesDatadogArgs

    Object string
    Object specified in the flow source.
    Object string
    Object specified in the flow source.
    object String
    Object specified in the flow source.
    object string
    Object specified in the flow source.
    object str
    Object specified in the flow source.
    object String
    Object specified in the flow source.

    FlowSourceFlowConfigSourceConnectorPropertiesDynatrace, FlowSourceFlowConfigSourceConnectorPropertiesDynatraceArgs

    Object string
    Object specified in the flow source.
    Object string
    Object specified in the flow source.
    object String
    Object specified in the flow source.
    object string
    Object specified in the flow source.
    object str
    Object specified in the flow source.
    object String
    Object specified in the flow source.

    FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalytics, FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalyticsArgs

    Object string
    Object specified in the flow source.
    Object string
    Object specified in the flow source.
    object String
    Object specified in the flow source.
    object string
    Object specified in the flow source.
    object str
    Object specified in the flow source.
    object String
    Object specified in the flow source.

    FlowSourceFlowConfigSourceConnectorPropertiesInforNexus, FlowSourceFlowConfigSourceConnectorPropertiesInforNexusArgs

    Object string
    Object specified in the flow source.
    Object string
    Object specified in the flow source.
    object String
    Object specified in the flow source.
    object string
    Object specified in the flow source.
    object str
    Object specified in the flow source.
    object String
    Object specified in the flow source.

    FlowSourceFlowConfigSourceConnectorPropertiesMarketo, FlowSourceFlowConfigSourceConnectorPropertiesMarketoArgs

    Object string
    Object specified in the flow source.
    Object string
    Object specified in the flow source.
    object String
    Object specified in the flow source.
    object string
    Object specified in the flow source.
    object str
    Object specified in the flow source.
    object String
    Object specified in the flow source.

    FlowSourceFlowConfigSourceConnectorPropertiesS3, FlowSourceFlowConfigSourceConnectorPropertiesS3Args

    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    S3InputFormatConfig FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfig
    When you use Amazon S3 as the source, the format configuration for the flow input data. See S3 Input Format Config for details.
    BucketName string
    Name of the Amazon S3 bucket.
    BucketPrefix string
    Amazon S3 bucket prefix.
    S3InputFormatConfig FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfig
    When you use Amazon S3 as the source, the format configuration for the flow input data. See S3 Input Format Config for details.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    s3InputFormatConfig FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfig
    When you use Amazon S3 as the source, the format configuration for the flow input data. See S3 Input Format Config for details.
    bucketName string
    Name of the Amazon S3 bucket.
    bucketPrefix string
    Amazon S3 bucket prefix.
    s3InputFormatConfig FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfig
    When you use Amazon S3 as the source, the format configuration for the flow input data. See S3 Input Format Config for details.
    bucket_name str
    Name of the Amazon S3 bucket.
    bucket_prefix str
    Amazon S3 bucket prefix.
    s3_input_format_config FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfig
    When you use Amazon S3 as the source, the format configuration for the flow input data. See S3 Input Format Config for details.
    bucketName String
    Name of the Amazon S3 bucket.
    bucketPrefix String
    Amazon S3 bucket prefix.
    s3InputFormatConfig Property Map
    When you use Amazon S3 as the source, the format configuration for the flow input data. See S3 Input Format Config for details.

    FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfig, FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfigArgs

    S3InputFileType string
    File type that Amazon AppFlow gets from your Amazon S3 bucket. Valid values are CSV and JSON.
    S3InputFileType string
    File type that Amazon AppFlow gets from your Amazon S3 bucket. Valid values are CSV and JSON.
    s3InputFileType String
    File type that Amazon AppFlow gets from your Amazon S3 bucket. Valid values are CSV and JSON.
    s3InputFileType string
    File type that Amazon AppFlow gets from your Amazon S3 bucket. Valid values are CSV and JSON.
    s3_input_file_type str
    File type that Amazon AppFlow gets from your Amazon S3 bucket. Valid values are CSV and JSON.
    s3InputFileType String
    File type that Amazon AppFlow gets from your Amazon S3 bucket. Valid values are CSV and JSON.
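
    For example, an S3 source that reads JSON objects might look like the following sketch; the bucket name and prefix are hypothetical placeholders, and the bucket policy must grant AppFlow read access as in the example at the top of this page.

    import * as aws from "@pulumi/aws";

    // Sketch only: S3 source reading JSON input. Bucket name and prefix are
    // hypothetical placeholders.
    const s3Source: aws.types.input.appflow.FlowSourceFlowConfig = {
        connectorType: "S3",
        sourceConnectorProperties: {
            s3: {
                bucketName: "my-flow-input",
                bucketPrefix: "raw",
                s3InputFormatConfig: {
                    s3InputFileType: "JSON",
                },
            },
        },
    };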

    FlowSourceFlowConfigSourceConnectorPropertiesSalesforce, FlowSourceFlowConfigSourceConnectorPropertiesSalesforceArgs

    Object string
    Object specified in the flow source.
    EnableDynamicFieldUpdate bool
    Flag that enables dynamic fetching of new (recently added) fields in the Salesforce objects while running a flow.
    IncludeDeletedRecords bool
    Whether Amazon AppFlow includes deleted records in the flow run.
    Object string
    Object specified in the flow source.
    EnableDynamicFieldUpdate bool
    Flag that enables dynamic fetching of new (recently added) fields in the Salesforce objects while running a flow.
    IncludeDeletedRecords bool
    Whether Amazon AppFlow includes deleted records in the flow run.
    object String
    Object specified in the flow source.
    enableDynamicFieldUpdate Boolean
    Flag that enables dynamic fetching of new (recently added) fields in the Salesforce objects while running a flow.
    includeDeletedRecords Boolean
    Whether Amazon AppFlow includes deleted records in the flow run.
    object string
    Object specified in the flow source.
    enableDynamicFieldUpdate boolean
    Flag that enables dynamic fetching of new (recently added) fields in the Salesforce objects while running a flow.
    includeDeletedRecords boolean
    Whether Amazon AppFlow includes deleted records in the flow run.
    object str
    Object specified in the flow source.
    enable_dynamic_field_update bool
    Flag that enables dynamic fetching of new (recently added) fields in the Salesforce objects while running a flow.
    include_deleted_records bool
    Whether Amazon AppFlow includes deleted records in the flow run.
    object String
    Object specified in the flow source.
    enableDynamicFieldUpdate Boolean
    Flag that enables dynamic fetching of new (recently added) fields in the Salesforce objects while running a flow.
    includeDeletedRecords Boolean
    Whether Amazon AppFlow includes deleted records in the flow run.
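
    A TypeScript sketch of a Salesforce source that picks up newly added fields and deleted records follows; the connector profile name and object are hypothetical placeholders.

    import * as aws from "@pulumi/aws";

    // Sketch only: Salesforce source. Profile name and object are placeholders.
    const salesforceSource: aws.types.input.appflow.FlowSourceFlowConfig = {
        connectorType: "Salesforce",
        connectorProfileName: "example-salesforce-profile",
        sourceConnectorProperties: {
            salesforce: {
                object: "Account",
                enableDynamicFieldUpdate: true,
                includeDeletedRecords: true,
            },
        },
    };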

    FlowSourceFlowConfigSourceConnectorPropertiesSapoData, FlowSourceFlowConfigSourceConnectorPropertiesSapoDataArgs

    ObjectPath string
    Object path specified in the SAPOData flow source.
    ObjectPath string
    Object path specified in the SAPOData flow source.
    objectPath String
    Object path specified in the SAPOData flow source.
    objectPath string
    Object path specified in the SAPOData flow source.
    object_path str
    Object path specified in the SAPOData flow source.
    objectPath String
    Object path specified in the SAPOData flow source.
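
    A sketch of an SAPOData source follows; the connector profile name and OData object path are hypothetical placeholders for an existing SAPOData connection.

    import * as aws from "@pulumi/aws";

    // Sketch only: SAPOData source. Profile name and object path are
    // hypothetical placeholders.
    const sapoDataSource: aws.types.input.appflow.FlowSourceFlowConfig = {
        connectorType: "SAPOData",
        connectorProfileName: "example-sapodata-profile",
        sourceConnectorProperties: {
            sapoData: {
                objectPath: "/sap/opu/odata/sap/EXAMPLE_SRV/ExampleEntity",
            },
        },
    };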

    FlowSourceFlowConfigSourceConnectorPropertiesServiceNow, FlowSourceFlowConfigSourceConnectorPropertiesServiceNowArgs

    Object string
    Object specified in the flow source.
    Object string
    Object specified in the flow source.
    object String
    Object specified in the flow source.
    object string
    Object specified in the flow source.
    object str
    Object specified in the flow source.
    object String
    Object specified in the flow source.

    FlowSourceFlowConfigSourceConnectorPropertiesSingular, FlowSourceFlowConfigSourceConnectorPropertiesSingularArgs

    Object string
    Object specified in the flow source.
    Object string
    Object specified in the flow source.
    object String
    Object specified in the flow source.
    object string
    Object specified in the flow source.
    object str
    Object specified in the flow source.
    object String
    Object specified in the flow source.

    FlowSourceFlowConfigSourceConnectorPropertiesSlack, FlowSourceFlowConfigSourceConnectorPropertiesSlackArgs

    Object string
    Object specified in the flow source.
    Object string
    Object specified in the flow source.
    object String
    Object specified in the flow source.
    object string
    Object specified in the flow source.
    object str
    Object specified in the flow source.
    object String
    Object specified in the flow source.

    FlowSourceFlowConfigSourceConnectorPropertiesTrendmicro, FlowSourceFlowConfigSourceConnectorPropertiesTrendmicroArgs

    Object string
    Object specified in the flow source.
    Object string
    Object specified in the flow source.
    object String
    Object specified in the flow source.
    object string
    Object specified in the flow source.
    object str
    Object specified in the flow source.
    object String
    Object specified in the flow source.

    FlowSourceFlowConfigSourceConnectorPropertiesVeeva, FlowSourceFlowConfigSourceConnectorPropertiesVeevaArgs

    Object string
    Object specified in the flow source.
    DocumentType string
    Document type specified in the Veeva document extract flow.
    IncludeAllVersions bool
    Whether to include all versions of files in the Veeva document extract flow.
    IncludeRenditions bool
    Whether to include file renditions in the Veeva document extract flow.
    IncludeSourceFiles bool
    Whether to include source files in the Veeva document extract flow.
    Object string
    Object specified in the flow source.
    DocumentType string
    Document type specified in the Veeva document extract flow.
    IncludeAllVersions bool
    Whether to include all versions of files in the Veeva document extract flow.
    IncludeRenditions bool
    Whether to include file renditions in the Veeva document extract flow.
    IncludeSourceFiles bool
    Whether to include source files in the Veeva document extract flow.
    object String
    Object specified in the flow source.
    documentType String
    Document type specified in the Veeva document extract flow.
    includeAllVersions Boolean
    Whether to include all versions of files in the Veeva document extract flow.
    includeRenditions Boolean
    Whether to include file renditions in the Veeva document extract flow.
    includeSourceFiles Boolean
    Whether to include source files in the Veeva document extract flow.
    object string
    Object specified in the flow source.
    documentType string
    Document type specified in the Veeva document extract flow.
    includeAllVersions boolean
    Whether to include all versions of files in the Veeva document extract flow.
    includeRenditions boolean
    Whether to include file renditions in the Veeva document extract flow.
    includeSourceFiles boolean
    Whether to include source files in the Veeva document extract flow.
    object str
    Object specified in the flow source.
    document_type str
    Document type specified in the Veeva document extract flow.
    include_all_versions bool
    Whether to include all versions of files in the Veeva document extract flow.
    include_renditions bool
    Whether to include file renditions in the Veeva document extract flow.
    include_source_files bool
    Whether to include source files in the Veeva document extract flow.
    object String
    Object specified in the flow source.
    documentType String
    Document type specified in the Veeva document extract flow.
    includeAllVersions Boolean
    Whether to include all versions of files in the Veeva document extract flow.
    includeRenditions Boolean
    Whether to include file renditions in the Veeva document extract flow.
    includeSourceFiles Boolean
    Whether to include source files in the Veeva document extract flow.
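
    A sketch of a Veeva document extract source follows; the profile name, object, and document type are hypothetical placeholders, and the boolean flags control which file variants are extracted.

    import * as aws from "@pulumi/aws";

    // Sketch only: Veeva document extract source. Names are hypothetical
    // placeholders; the flags select which file variants to extract.
    const veevaSource: aws.types.input.appflow.FlowSourceFlowConfig = {
        connectorType: "Veeva",
        connectorProfileName: "example-veeva-profile",
        sourceConnectorProperties: {
            veeva: {
                object: "documents",
                documentType: "Promotional",
                includeAllVersions: false,
                includeRenditions: true,
                includeSourceFiles: true,
            },
        },
    };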

    FlowSourceFlowConfigSourceConnectorPropertiesZendesk, FlowSourceFlowConfigSourceConnectorPropertiesZendeskArgs

    Object string
    Object specified in the flow source.
    Object string
    Object specified in the flow source.
    object String
    Object specified in the flow source.
    object string
    Object specified in the flow source.
    object str
    Object specified in the flow source.
    object String
    Object specified in the flow source.

    FlowTask, FlowTaskArgs

    TaskType string
    Particular task implementation that Amazon AppFlow performs. Valid values are Arithmetic, Filter, Map, Map_all, Mask, Merge, Passthrough, Truncate, and Validate.
    ConnectorOperators List<FlowTaskConnectorOperator>
    Operation to be performed on the provided source fields. See Connector Operator for details.
    DestinationField string
    Field in a destination connector, or a field value against which Amazon AppFlow validates a source field.
    SourceFields List<string>
    Source fields to which a particular task is applied.
    TaskProperties Dictionary<string, string>
    Map used to store task-related information. The execution service looks for particular information based on the TaskType. Valid keys are VALUE, VALUES, DATA_TYPE, UPPER_BOUND, LOWER_BOUND, SOURCE_DATA_TYPE, DESTINATION_DATA_TYPE, VALIDATION_ACTION, MASK_VALUE, MASK_LENGTH, TRUNCATE_LENGTH, MATH_OPERATION_FIELDS_ORDER, CONCAT_FORMAT, SUBFIELD_CATEGORY_MAP, and EXCLUDE_SOURCE_FIELDS_LIST.
    TaskType string
    Particular task implementation that Amazon AppFlow performs. Valid values are Arithmetic, Filter, Map, Map_all, Mask, Merge, Passthrough, Truncate, and Validate.
    ConnectorOperators []FlowTaskConnectorOperator
    Operation to be performed on the provided source fields. See Connector Operator for details.
    DestinationField string
    Field in a destination connector, or a field value against which Amazon AppFlow validates a source field.
    SourceFields []string
    Source fields to which a particular task is applied.
    TaskProperties map[string]string
    Map used to store task-related information. The execution service looks for particular information based on the TaskType. Valid keys are VALUE, VALUES, DATA_TYPE, UPPER_BOUND, LOWER_BOUND, SOURCE_DATA_TYPE, DESTINATION_DATA_TYPE, VALIDATION_ACTION, MASK_VALUE, MASK_LENGTH, TRUNCATE_LENGTH, MATH_OPERATION_FIELDS_ORDER, CONCAT_FORMAT, SUBFIELD_CATEGORY_MAP, and EXCLUDE_SOURCE_FIELDS_LIST.
    taskType String
    Particular task implementation that Amazon AppFlow performs. Valid values are Arithmetic, Filter, Map, Map_all, Mask, Merge, Passthrough, Truncate, and Validate.
    connectorOperators List<FlowTaskConnectorOperator>
    Operation to be performed on the provided source fields. See Connector Operator for details.
    destinationField String
    Field in a destination connector, or a field value against which Amazon AppFlow validates a source field.
    sourceFields List<String>
    Source fields to which a particular task is applied.
    taskProperties Map<String,String>
    Map used to store task-related information. The execution service looks for particular information based on the TaskType. Valid keys are VALUE, VALUES, DATA_TYPE, UPPER_BOUND, LOWER_BOUND, SOURCE_DATA_TYPE, DESTINATION_DATA_TYPE, VALIDATION_ACTION, MASK_VALUE, MASK_LENGTH, TRUNCATE_LENGTH, MATH_OPERATION_FIELDS_ORDER, CONCAT_FORMAT, SUBFIELD_CATEGORY_MAP, and EXCLUDE_SOURCE_FIELDS_LIST.
    taskType string
    Particular task implementation that Amazon AppFlow performs. Valid values are Arithmetic, Filter, Map, Map_all, Mask, Merge, Passthrough, Truncate, and Validate.
    connectorOperators FlowTaskConnectorOperator[]
    Operation to be performed on the provided source fields. See Connector Operator for details.
    destinationField string
    Field in a destination connector, or a field value against which Amazon AppFlow validates a source field.
    sourceFields string[]
    Source fields to which a particular task is applied.
    taskProperties {[key: string]: string}
    Map used to store task-related information. The execution service looks for particular information based on the TaskType. Valid keys are VALUE, VALUES, DATA_TYPE, UPPER_BOUND, LOWER_BOUND, SOURCE_DATA_TYPE, DESTINATION_DATA_TYPE, VALIDATION_ACTION, MASK_VALUE, MASK_LENGTH, TRUNCATE_LENGTH, MATH_OPERATION_FIELDS_ORDER, CONCAT_FORMAT, SUBFIELD_CATEGORY_MAP, and EXCLUDE_SOURCE_FIELDS_LIST.
    task_type str
    Particular task implementation that Amazon AppFlow performs. Valid values are Arithmetic, Filter, Map, Map_all, Mask, Merge, Passthrough, Truncate, and Validate.
    connector_operators Sequence[FlowTaskConnectorOperator]
    Operation to be performed on the provided source fields. See Connector Operator for details.
    destination_field str
    Field in a destination connector, or a field value against which Amazon AppFlow validates a source field.
    source_fields Sequence[str]
    Source fields to which a particular task is applied.
    task_properties Mapping[str, str]
    Map used to store task-related information. The execution service looks for particular information based on the TaskType. Valid keys are VALUE, VALUES, DATA_TYPE, UPPER_BOUND, LOWER_BOUND, SOURCE_DATA_TYPE, DESTINATION_DATA_TYPE, VALIDATION_ACTION, MASK_VALUE, MASK_LENGTH, TRUNCATE_LENGTH, MATH_OPERATION_FIELDS_ORDER, CONCAT_FORMAT, SUBFIELD_CATEGORY_MAP, and EXCLUDE_SOURCE_FIELDS_LIST.
    taskType String
    Particular task implementation that Amazon AppFlow performs. Valid values are Arithmetic, Filter, Map, Map_all, Mask, Merge, Passthrough, Truncate, and Validate.
    connectorOperators List<Property Map>
    Operation to be performed on the provided source fields. See Connector Operator for details.
    destinationField String
    Field in a destination connector, or a field value against which Amazon AppFlow validates a source field.
    sourceFields List<String>
    Source fields to which a particular task is applied.
    taskProperties Map<String>
    Map used to store task-related information. The execution service looks for particular information based on the TaskType. Valid keys are VALUE, VALUES, DATA_TYPE, UPPER_BOUND, LOWER_BOUND, SOURCE_DATA_TYPE, DESTINATION_DATA_TYPE, VALIDATION_ACTION, MASK_VALUE, MASK_LENGTH, TRUNCATE_LENGTH, MATH_OPERATION_FIELDS_ORDER, CONCAT_FORMAT, SUBFIELD_CATEGORY_MAP, and EXCLUDE_SOURCE_FIELDS_LIST.
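
    A sketch of a task list follows: a Filter task projects the source fields, and a Mask task masks the last four characters of one of them using the task property keys listed above. The field names are hypothetical placeholders.

    import * as aws from "@pulumi/aws";

    // Sketch only: a projection task followed by a masking task.
    // Field names ("email", "phone") are hypothetical placeholders.
    const tasks: aws.types.input.appflow.FlowTask[] = [
        {
            taskType: "Filter",
            sourceFields: ["email", "phone"],
            connectorOperators: [{ salesforce: "PROJECTION" }],
        },
        {
            taskType: "Mask",
            sourceFields: ["phone"],
            destinationField: "phone",
            connectorOperators: [{ salesforce: "MASK_LAST_N" }],
            taskProperties: {
                MASK_LENGTH: "4",
                MASK_VALUE: "*",
            },
        },
    ];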

    FlowTaskConnectorOperator, FlowTaskConnectorOperatorArgs

    Amplitude string
    Operation to be performed on the provided Amplitude source fields. The only valid value is BETWEEN.
    CustomConnector string
    Operators supported by the custom connector. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Datadog string
    Operation to be performed on the provided Datadog source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Dynatrace string
    Operation to be performed on the provided Dynatrace source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    GoogleAnalytics string
    Operation to be performed on the provided Google Analytics source fields. Valid values are PROJECTION and BETWEEN.
    InforNexus string
    Operation to be performed on the provided Infor Nexus source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Marketo string
    Operation to be performed on the provided Marketo source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    S3 string
    Operation to be performed on the provided Amazon S3 source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Salesforce string
    Operation to be performed on the provided Salesforce source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    SapoData string
    Operation to be performed on the provided SAPOData source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    ServiceNow string
    Operation to be performed on the provided ServiceNow source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Singular string
    Operation to be performed on the provided Singular source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Slack string
    Operation to be performed on the provided Slack source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Trendmicro string
    Operation to be performed on the provided Trend Micro source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Veeva string
    Operation to be performed on the provided Veeva source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Zendesk string
    Operation to be performed on the provided Zendesk source fields. Valid values are PROJECTION, GREATER_THAN, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Amplitude string
    Operation to be performed on the provided Amplitude source fields. The only valid value is BETWEEN.
    CustomConnector string
    Operators supported by the custom connector. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Datadog string
    Operation to be performed on the provided Datadog source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Dynatrace string
    Operation to be performed on the provided Dynatrace source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    GoogleAnalytics string
    Operation to be performed on the provided Google Analytics source fields. Valid values are PROJECTION and BETWEEN.
    InforNexus string
    Operation to be performed on the provided Infor Nexus source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Marketo string
    Operation to be performed on the provided Marketo source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    S3 string
    Operation to be performed on the provided Amazon S3 source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Salesforce string
    Operation to be performed on the provided Salesforce source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    SapoData string
    Operation to be performed on the provided SAPOData source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    ServiceNow string
    Operation to be performed on the provided ServiceNow source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Singular string
    Operation to be performed on the provided Singular source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Slack string
    Operation to be performed on the provided Slack source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Trendmicro string
    Operation to be performed on the provided Trend Micro source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Veeva string
    Operation to be performed on the provided Veeva source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    Zendesk string
    Operation to be performed on the provided Zendesk source fields. Valid values are PROJECTION, GREATER_THAN, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    amplitude String
    Operation to be performed on the provided Amplitude source fields. The only valid value is BETWEEN.
    customConnector String
    Operators supported by the custom connector. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    datadog String
    Operation to be performed on the provided Datadog source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    dynatrace String
    Operation to be performed on the provided Dynatrace source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    googleAnalytics String
    Operation to be performed on the provided Google Analytics source fields. Valid values are PROJECTION and BETWEEN.
    inforNexus String
    Operation to be performed on the provided Infor Nexus source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    marketo String
    Operation to be performed on the provided Marketo source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    s3 String
    Operation to be performed on the provided Amazon S3 source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    salesforce String
    Operation to be performed on the provided Salesforce source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    sapoData String
    Operation to be performed on the provided SAPOData source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    serviceNow String
    Operation to be performed on the provided ServiceNow source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    singular String
    Operation to be performed on the provided Singular source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    slack String
    Operation to be performed on the provided Slack source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    trendmicro String
    Operation to be performed on the provided Trend Micro source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    veeva String
    Operation to be performed on the provided Veeva source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    zendesk String
    Operation to be performed on the provided Zendesk source fields. Valid values are PROJECTION, GREATER_THAN, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    amplitude string
    Operation to be performed on the provided Amplitude source fields. The only valid value is BETWEEN.
    customConnector string
    Operators supported by the custom connector. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    datadog string
    Operation to be performed on the provided Datadog source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    dynatrace string
    Operation to be performed on the provided Dynatrace source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    googleAnalytics string
    Operation to be performed on the provided Google Analytics source fields. Valid values are PROJECTION and BETWEEN.
    inforNexus string
    Operation to be performed on the provided Infor Nexus source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    marketo string
    Operation to be performed on the provided Marketo source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    s3 string
    Operation to be performed on the provided Amazon S3 source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    salesforce string
    Operation to be performed on the provided Salesforce source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    sapoData string
    Operation to be performed on the provided SAPOData source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    serviceNow string
    Operation to be performed on the provided ServiceNow source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    singular string
    Operation to be performed on the provided Singular source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    slack string
    Operation to be performed on the provided Slack source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    trendmicro string
    Operation to be performed on the provided Trend Micro source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    veeva string
    Operation to be performed on the provided Veeva source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    zendesk string
    Operation to be performed on the provided Zendesk source fields. Valid values are PROJECTION, GREATER_THAN, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    amplitude str
    Operation to be performed on the provided Amplitude source fields. The only valid value is BETWEEN.
    custom_connector str
    Operators supported by the custom connector. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    datadog str
    Operation to be performed on the provided Datadog source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    dynatrace str
    Operation to be performed on the provided Dynatrace source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    google_analytics str
    Operation to be performed on the provided Google Analytics source fields. Valid values are PROJECTION and BETWEEN.
    infor_nexus str
    Operation to be performed on the provided Infor Nexus source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    marketo str
    Operation to be performed on the provided Marketo source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    s3 str
    Operation to be performed on the provided Amazon S3 source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    salesforce str
    Operation to be performed on the provided Salesforce source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    sapo_data str
    Operation to be performed on the provided SAPOData source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    service_now str
    Operation to be performed on the provided ServiceNow source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    singular str
    Operation to be performed on the provided Singular source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    slack str
    Operation to be performed on the provided Slack source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    trendmicro str
    Operation to be performed on the provided Trend Micro source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    veeva str
    Operation to be performed on the provided Veeva source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    zendesk str
    Operation to be performed on the provided Zendesk source fields. Valid values are PROJECTION, GREATER_THAN, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    amplitude String
    Operation to be performed on the provided Amplitude source fields. The only valid value is BETWEEN.
    customConnector String
    Operators supported by the custom connector. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    datadog String
    Operation to be performed on the provided Datadog source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    dynatrace String
    Operation to be performed on the provided Dynatrace source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    googleAnalytics String
    Operation to be performed on the provided Google Analytics source fields. Valid values are PROJECTION and BETWEEN.
    inforNexus String
    Operation to be performed on the provided Infor Nexus source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    marketo String
    Operation to be performed on the provided Marketo source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    s3 String
    Operation to be performed on the provided Amazon S3 source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    salesforce String
    Operation to be performed on the provided Salesforce source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    sapoData String
    Operation to be performed on the provided SAPOData source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    serviceNow String
    Operation to be performed on the provided ServiceNow source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    singular String
    Operation to be performed on the provided Singular source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    slack String
    Operation to be performed on the provided Slack source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    trendmicro String
    Operation to be performed on the provided Trend Micro source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    veeva String
    Operation to be performed on the provided Veeva source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
    zendesk String
    Operation to be performed on the provided Zendesk source fields. Valid values are PROJECTION, GREATER_THAN, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
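
    These per-connector operator fields are set inside a flow's tasks. As a rough, non-authoritative sketch (TypeScript; the "price" field, the bounds, and the type path aws.types.input.appflow.FlowTask are illustrative assumptions, not values from this page), a Filter task on an S3 source could use the BETWEEN operator listed above, followed by a Map_all pass-through:

    import * as aws from "@pulumi/aws";

    // Illustrative only: filter rows whose hypothetical "price" field lies in a
    // range using the S3 BETWEEN operator, then map the remaining fields with NO_OP.
    const exampleTasks: aws.types.input.appflow.FlowTask[] = [
        {
            taskType: "Filter",
            sourceFields: ["price"],
            connectorOperator: { s3: "BETWEEN" },
            taskProperties: {
                LOWER_BOUND: "10",   // assumed AppFlow task property keys for BETWEEN
                UPPER_BOUND: "100",
            },
        },
        {
            taskType: "Map_all",
            sourceFields: [],
            connectorOperator: { s3: "NO_OP" },
            taskProperties: {},
        },
    ];

    These task objects would then be passed as the tasks argument of aws.appflow.Flow alongside the source and destination configuration.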

    FlowTriggerConfig, FlowTriggerConfigArgs

    TriggerType string
    Type of flow trigger. Valid values are Scheduled, Event, and OnDemand.
    TriggerProperties FlowTriggerConfigTriggerProperties
    Configuration details of a schedule-triggered flow as defined by the user. Currently, these settings only apply to the Scheduled trigger type. See Scheduled Trigger Properties for details.
    TriggerType string
    Type of flow trigger. Valid values are Scheduled, Event, and OnDemand.
    TriggerProperties FlowTriggerConfigTriggerProperties
    Configuration details of a schedule-triggered flow as defined by the user. Currently, these settings only apply to the Scheduled trigger type. See Scheduled Trigger Properties for details.
    triggerType String
    Type of flow trigger. Valid values are Scheduled, Event, and OnDemand.
    triggerProperties FlowTriggerConfigTriggerProperties
    Configuration details of a schedule-triggered flow as defined by the user. Currently, these settings only apply to the Scheduled trigger type. See Scheduled Trigger Properties for details.
    triggerType string
    Type of flow trigger. Valid values are Scheduled, Event, and OnDemand.
    triggerProperties FlowTriggerConfigTriggerProperties
    Configuration details of a schedule-triggered flow as defined by the user. Currently, these settings only apply to the Scheduled trigger type. See Scheduled Trigger Properties for details.
    trigger_type str
    Type of flow trigger. Valid values are Scheduled, Event, and OnDemand.
    trigger_properties FlowTriggerConfigTriggerProperties
    Configuration details of a schedule-triggered flow as defined by the user. Currently, these settings only apply to the Scheduled trigger type. See Scheduled Trigger Properties for details.
    triggerType String
    Type of flow trigger. Valid values are Scheduled, Event, and OnDemand.
    triggerProperties Property Map
    Configuration details of a schedule-triggered flow as defined by the user. Currently, these settings only apply to the Scheduled trigger type. See Scheduled Trigger Properties for details.
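
    For orientation, a brief sketch (TypeScript) of the two common trigger shapes; the schedule values are placeholders and the type path aws.types.input.appflow.FlowTriggerConfig is assumed from this provider's TypeScript SDK. An OnDemand trigger needs only triggerType, while a Scheduled trigger also supplies triggerProperties.scheduled:

    import * as aws from "@pulumi/aws";

    // Illustrative trigger configurations; pass one of these as the
    // triggerConfig argument of aws.appflow.Flow.
    const onDemandTrigger: aws.types.input.appflow.FlowTriggerConfig = {
        triggerType: "OnDemand",
    };

    const scheduledTrigger: aws.types.input.appflow.FlowTriggerConfig = {
        triggerType: "Scheduled",
        triggerProperties: {
            scheduled: {
                scheduleExpression: "rate(1days)",   // placeholder AppFlow rate expression
                dataPullMode: "Incremental",
                timezone: "America/New_York",
            },
        },
    };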

    FlowTriggerConfigTriggerProperties, FlowTriggerConfigTriggerPropertiesArgs

    FlowTriggerConfigTriggerPropertiesScheduled, FlowTriggerConfigTriggerPropertiesScheduledArgs

    ScheduleExpression string
    Scheduling expression that determines the rate at which the schedule will run, for example rate(5minutes).
    DataPullMode string
    Whether a scheduled flow has an incremental data transfer or a complete data transfer for each flow run. Valid values are Incremental and Complete.
    FirstExecutionFrom string
    Date range for the records to import from the connector in the first flow run. Must be a valid RFC3339 timestamp.
    ScheduleEndTime string
    Scheduled end time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
    ScheduleOffset int
    Optional offset that is added to the time interval for a schedule-triggered flow. Maximum value of 36000.
    ScheduleStartTime string
    Scheduled start time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
    Timezone string
    Time zone used when referring to the date and time of a schedule-triggered flow, such as America/New_York.

    package generated_program;

    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.aws.appflow.Flow;
    import com.pulumi.aws.appflow.FlowArgs;
    import com.pulumi.aws.appflow.inputs.FlowTriggerConfigArgs;
    import com.pulumi.aws.appflow.inputs.FlowTriggerConfigTriggerPropertiesArgs;
    import com.pulumi.aws.appflow.inputs.FlowTriggerConfigTriggerPropertiesScheduledArgs;

    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }

        public static void stack(Context ctx) {
            // Other required Flow arguments (name, sourceFlowConfig,
            // destinationFlowConfigs, tasks) are omitted for brevity.
            var example = new Flow("example", FlowArgs.builder()
                .triggerConfig(FlowTriggerConfigArgs.builder()
                    .triggerProperties(FlowTriggerConfigTriggerPropertiesArgs.builder()
                        .scheduled(FlowTriggerConfigTriggerPropertiesScheduledArgs.builder()
                            .scheduleExpression("rate(1minutes)")
                            .build())
                        .build())
                    .build())
                .build());
        }
    }

    resources:
      example:
        type: aws:appflow:Flow
        properties:
          triggerConfig:
            triggerProperties:
              scheduled:
                scheduleExpression: rate(1minutes)
    
    ScheduleExpression string
    Scheduling expression that determines the rate at which the schedule will run, for example rate(5minutes).
    DataPullMode string
    Whether a scheduled flow has an incremental data transfer or a complete data transfer for each flow run. Valid values are Incremental and Complete.
    FirstExecutionFrom string
    Date range for the records to import from the connector in the first flow run. Must be a valid RFC3339 timestamp.
    ScheduleEndTime string
    Scheduled end time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
    ScheduleOffset int
    Optional offset that is added to the time interval for a schedule-triggered flow. Maximum value of 36000.
    ScheduleStartTime string
    Scheduled start time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
    Timezone string
    Time zone used when referring to the date and time of a schedule-triggered flow, such as America/New_York.

    package generated_program;

    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.aws.appflow.Flow;
    import com.pulumi.aws.appflow.FlowArgs;
    import com.pulumi.aws.appflow.inputs.FlowTriggerConfigArgs;
    import com.pulumi.aws.appflow.inputs.FlowTriggerConfigTriggerPropertiesArgs;
    import com.pulumi.aws.appflow.inputs.FlowTriggerConfigTriggerPropertiesScheduledArgs;

    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }

        public static void stack(Context ctx) {
            // Other required Flow arguments (name, sourceFlowConfig,
            // destinationFlowConfigs, tasks) are omitted for brevity.
            var example = new Flow("example", FlowArgs.builder()
                .triggerConfig(FlowTriggerConfigArgs.builder()
                    .triggerProperties(FlowTriggerConfigTriggerPropertiesArgs.builder()
                        .scheduled(FlowTriggerConfigTriggerPropertiesScheduledArgs.builder()
                            .scheduleExpression("rate(1minutes)")
                            .build())
                        .build())
                    .build())
                .build());
        }
    }

    resources:
      example:
        type: aws:appflow:Flow
        properties:
          triggerConfig:
            triggerProperties:
              scheduled:
                scheduleExpression: rate(1minutes)
    
    scheduleExpression String
    Scheduling expression that determines the rate at which the schedule will run, for example rate(5minutes).
    dataPullMode String
    Whether a scheduled flow has an incremental data transfer or a complete data transfer for each flow run. Valid values are Incremental and Complete.
    firstExecutionFrom String
    Date range for the records to import from the connector in the first flow run. Must be a valid RFC3339 timestamp.
    scheduleEndTime String
    Scheduled end time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
    scheduleOffset Integer
    Optional offset that is added to the time interval for a schedule-triggered flow. Maximum value of 36000.
    scheduleStartTime String
    Scheduled start time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
    timezone String
    Time zone used when referring to the date and time of a schedule-triggered flow, such as America/New_York.

    package generated_program;

    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.aws.appflow.Flow;
    import com.pulumi.aws.appflow.FlowArgs;
    import com.pulumi.aws.appflow.inputs.FlowTriggerConfigArgs;
    import com.pulumi.aws.appflow.inputs.FlowTriggerConfigTriggerPropertiesArgs;
    import com.pulumi.aws.appflow.inputs.FlowTriggerConfigTriggerPropertiesScheduledArgs;

    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }

        public static void stack(Context ctx) {
            // Other required Flow arguments (name, sourceFlowConfig,
            // destinationFlowConfigs, tasks) are omitted for brevity.
            var example = new Flow("example", FlowArgs.builder()
                .triggerConfig(FlowTriggerConfigArgs.builder()
                    .triggerProperties(FlowTriggerConfigTriggerPropertiesArgs.builder()
                        .scheduled(FlowTriggerConfigTriggerPropertiesScheduledArgs.builder()
                            .scheduleExpression("rate(1minutes)")
                            .build())
                        .build())
                    .build())
                .build());
        }
    }

    resources:
      example:
        type: aws:appflow:Flow
        properties:
          triggerConfig:
            triggerProperties:
              scheduled:
                scheduleExpression: rate(1minutes)
    
    scheduleExpression string
    Scheduling expression that determines the rate at which the schedule will run, for example rate(5minutes).
    dataPullMode string
    Whether a scheduled flow has an incremental data transfer or a complete data transfer for each flow run. Valid values are Incremental and Complete.
    firstExecutionFrom string
    Date range for the records to import from the connector in the first flow run. Must be a valid RFC3339 timestamp.
    scheduleEndTime string
    Scheduled end time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
    scheduleOffset number
    Optional offset that is added to the time interval for a schedule-triggered flow. Maximum value of 36000.
    scheduleStartTime string
    Scheduled start time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
    timezone string
    Time zone used when referring to the date and time of a schedule-triggered flow, such as America/New_York.

    package generated_program;

    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.aws.appflow.Flow;
    import com.pulumi.aws.appflow.FlowArgs;
    import com.pulumi.aws.appflow.inputs.FlowTriggerConfigArgs;
    import com.pulumi.aws.appflow.inputs.FlowTriggerConfigTriggerPropertiesArgs;
    import com.pulumi.aws.appflow.inputs.FlowTriggerConfigTriggerPropertiesScheduledArgs;

    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }

        public static void stack(Context ctx) {
            // Other required Flow arguments (name, sourceFlowConfig,
            // destinationFlowConfigs, tasks) are omitted for brevity.
            var example = new Flow("example", FlowArgs.builder()
                .triggerConfig(FlowTriggerConfigArgs.builder()
                    .triggerProperties(FlowTriggerConfigTriggerPropertiesArgs.builder()
                        .scheduled(FlowTriggerConfigTriggerPropertiesScheduledArgs.builder()
                            .scheduleExpression("rate(1minutes)")
                            .build())
                        .build())
                    .build())
                .build());
        }
    }

    resources:
      example:
        type: aws:appflow:Flow
        properties:
          triggerConfig:
            triggerProperties:
              scheduled:
                scheduleExpression: rate(1minutes)
    
    schedule_expression str
    Scheduling expression that determines the rate at which the schedule will run, for example rate(5minutes).
    data_pull_mode str
    Whether a scheduled flow has an incremental data transfer or a complete data transfer for each flow run. Valid values are Incremental and Complete.
    first_execution_from str
    Date range for the records to import from the connector in the first flow run. Must be a valid RFC3339 timestamp.
    schedule_end_time str
    Scheduled end time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
    schedule_offset int
    Optional offset that is added to the time interval for a schedule-triggered flow. Maximum value of 36000.
    schedule_start_time str
    Scheduled start time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
    timezone str
    Time zone used when referring to the date and time of a schedule-triggered flow, such as America/New_York.

    package generated_program;

    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.aws.appflow.Flow;
    import com.pulumi.aws.appflow.FlowArgs;
    import com.pulumi.aws.appflow.inputs.FlowTriggerConfigArgs;
    import com.pulumi.aws.appflow.inputs.FlowTriggerConfigTriggerPropertiesArgs;
    import com.pulumi.aws.appflow.inputs.FlowTriggerConfigTriggerPropertiesScheduledArgs;

    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }

        public static void stack(Context ctx) {
            // Other required Flow arguments (name, sourceFlowConfig,
            // destinationFlowConfigs, tasks) are omitted for brevity.
            var example = new Flow("example", FlowArgs.builder()
                .triggerConfig(FlowTriggerConfigArgs.builder()
                    .triggerProperties(FlowTriggerConfigTriggerPropertiesArgs.builder()
                        .scheduled(FlowTriggerConfigTriggerPropertiesScheduledArgs.builder()
                            .scheduleExpression("rate(1minutes)")
                            .build())
                        .build())
                    .build())
                .build());
        }
    }

    resources:
      example:
        type: aws:appflow:Flow
        properties:
          triggerConfig:
            triggerProperties:
              scheduled:
                scheduleExpression: rate(1minutes)
    
    scheduleExpression String
    Scheduling expression that determines the rate at which the schedule will run, for example rate(5minutes).
    dataPullMode String
    Whether a scheduled flow has an incremental data transfer or a complete data transfer for each flow run. Valid values are Incremental and Complete.
    firstExecutionFrom String
    Date range for the records to import from the connector in the first flow run. Must be a valid RFC3339 timestamp.
    scheduleEndTime String
    Scheduled end time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
    scheduleOffset Number
    Optional offset that is added to the time interval for a schedule-triggered flow. Maximum value of 36000.
    scheduleStartTime String
    Scheduled start time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
    timezone String
    Time zone used when referring to the date and time of a schedule-triggered flow, such as America/New_York.

    package generated_program;

    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.aws.appflow.Flow;
    import com.pulumi.aws.appflow.FlowArgs;
    import com.pulumi.aws.appflow.inputs.FlowTriggerConfigArgs;
    import com.pulumi.aws.appflow.inputs.FlowTriggerConfigTriggerPropertiesArgs;
    import com.pulumi.aws.appflow.inputs.FlowTriggerConfigTriggerPropertiesScheduledArgs;

    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }

        public static void stack(Context ctx) {
            // Other required Flow arguments (name, sourceFlowConfig,
            // destinationFlowConfigs, tasks) are omitted for brevity.
            var example = new Flow("example", FlowArgs.builder()
                .triggerConfig(FlowTriggerConfigArgs.builder()
                    .triggerProperties(FlowTriggerConfigTriggerPropertiesArgs.builder()
                        .scheduled(FlowTriggerConfigTriggerPropertiesScheduledArgs.builder()
                            .scheduleExpression("rate(1minutes)")
                            .build())
                        .build())
                    .build())
                .build());
        }
    }

    resources:
      example:
        type: aws:appflow:Flow
        properties:
          triggerConfig:
            triggerProperties:
              scheduled:
                scheduleExpression: rate(1minutes)
    

    Import

    Using pulumi import, import AppFlow flows using the arn. For example:

    $ pulumi import aws:appflow/flow:Flow example arn:aws:appflow:us-west-2:123456789012:flow/example-flow
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    AWS Classic pulumi/pulumi-aws
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the aws Terraform Provider.