aws.appflow.Flow
Try AWS Native preview for resources not in the classic version.
Provides an AppFlow flow resource.
Example Usage
C#
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;
return await Deployment.RunAsync(() =>
{
var exampleSourceBucketV2 = new Aws.S3.BucketV2("exampleSourceBucketV2");
var exampleSourcePolicyDocument = Aws.Iam.GetPolicyDocument.Invoke(new()
{
Statements = new[]
{
new Aws.Iam.Inputs.GetPolicyDocumentStatementInputArgs
{
Sid = "AllowAppFlowSourceActions",
Effect = "Allow",
Principals = new[]
{
new Aws.Iam.Inputs.GetPolicyDocumentStatementPrincipalInputArgs
{
Type = "Service",
Identifiers = new[]
{
"appflow.amazonaws.com",
},
},
},
Actions = new[]
{
"s3:ListBucket",
"s3:GetObject",
},
Resources = new[]
{
"arn:aws:s3:::example_source",
"arn:aws:s3:::example_source/*",
},
},
},
});
var exampleSourceBucketPolicy = new Aws.S3.BucketPolicy("exampleSourceBucketPolicy", new()
{
Bucket = exampleSourceBucketV2.Id,
Policy = exampleSourcePolicyDocument.Apply(getPolicyDocumentResult => getPolicyDocumentResult.Json),
});
var exampleBucketObjectv2 = new Aws.S3.BucketObjectv2("exampleBucketObjectv2", new()
{
Bucket = exampleSourceBucketV2.Id,
Key = "example_source.csv",
Source = new FileAsset("example_source.csv"),
});
var exampleDestinationBucketV2 = new Aws.S3.BucketV2("exampleDestinationBucketV2");
var exampleDestinationPolicyDocument = Aws.Iam.GetPolicyDocument.Invoke(new()
{
Statements = new[]
{
new Aws.Iam.Inputs.GetPolicyDocumentStatementInputArgs
{
Sid = "AllowAppFlowDestinationActions",
Effect = "Allow",
Principals = new[]
{
new Aws.Iam.Inputs.GetPolicyDocumentStatementPrincipalInputArgs
{
Type = "Service",
Identifiers = new[]
{
"appflow.amazonaws.com",
},
},
},
Actions = new[]
{
"s3:PutObject",
"s3:AbortMultipartUpload",
"s3:ListMultipartUploadParts",
"s3:ListBucketMultipartUploads",
"s3:GetBucketAcl",
"s3:PutObjectAcl",
},
Resources = new[]
{
"arn:aws:s3:::example_destination",
"arn:aws:s3:::example_destination/*",
},
},
},
});
var exampleDestinationBucketPolicy = new Aws.S3.BucketPolicy("exampleDestinationBucketPolicy", new()
{
Bucket = exampleDestinationBucketV2.Id,
Policy = exampleDestinationPolicyDocument.Apply(getPolicyDocumentResult => getPolicyDocumentResult.Json),
});
var exampleFlow = new Aws.AppFlow.Flow("exampleFlow", new()
{
SourceFlowConfig = new Aws.AppFlow.Inputs.FlowSourceFlowConfigArgs
{
ConnectorType = "S3",
SourceConnectorProperties = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesArgs
{
S3 = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesS3Args
{
BucketName = exampleSourceBucketPolicy.Bucket,
BucketPrefix = "example",
},
},
},
DestinationFlowConfigs = new[]
{
new Aws.AppFlow.Inputs.FlowDestinationFlowConfigArgs
{
ConnectorType = "S3",
DestinationConnectorProperties = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesArgs
{
S3 = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args
{
BucketName = exampleDestinationBucketPolicy.Bucket,
S3OutputFormatConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs
{
PrefixConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs
{
PrefixType = "PATH",
},
},
},
},
},
},
Tasks = new[]
{
new Aws.AppFlow.Inputs.FlowTaskArgs
{
SourceFields = new[]
{
"exampleField",
},
DestinationField = "exampleField",
TaskType = "Map",
ConnectorOperators = new[]
{
new Aws.AppFlow.Inputs.FlowTaskConnectorOperatorArgs
{
S3 = "NO_OP",
},
},
},
},
TriggerConfig = new Aws.AppFlow.Inputs.FlowTriggerConfigArgs
{
TriggerType = "OnDemand",
},
});
});
Go
package main
import (
"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/appflow"
"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/iam"
"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/s3"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
exampleSourceBucketV2, err := s3.NewBucketV2(ctx, "exampleSourceBucketV2", nil)
if err != nil {
return err
}
exampleSourcePolicyDocument, err := iam.GetPolicyDocument(ctx, &iam.GetPolicyDocumentArgs{
Statements: []iam.GetPolicyDocumentStatement{
{
Sid: pulumi.StringRef("AllowAppFlowSourceActions"),
Effect: pulumi.StringRef("Allow"),
Principals: []iam.GetPolicyDocumentStatementPrincipal{
{
Type: "Service",
Identifiers: []string{
"appflow.amazonaws.com",
},
},
},
Actions: []string{
"s3:ListBucket",
"s3:GetObject",
},
Resources: []string{
"arn:aws:s3:::example_source",
"arn:aws:s3:::example_source/*",
},
},
},
}, nil)
if err != nil {
return err
}
exampleSourceBucketPolicy, err := s3.NewBucketPolicy(ctx, "exampleSourceBucketPolicy", &s3.BucketPolicyArgs{
Bucket: exampleSourceBucketV2.ID(),
Policy: pulumi.String(exampleSourcePolicyDocument.Json),
})
if err != nil {
return err
}
_, err = s3.NewBucketObjectv2(ctx, "exampleBucketObjectv2", &s3.BucketObjectv2Args{
Bucket: exampleSourceBucketV2.ID(),
Key: pulumi.String("example_source.csv"),
Source: pulumi.NewFileAsset("example_source.csv"),
})
if err != nil {
return err
}
exampleDestinationBucketV2, err := s3.NewBucketV2(ctx, "exampleDestinationBucketV2", nil)
if err != nil {
return err
}
exampleDestinationPolicyDocument, err := iam.GetPolicyDocument(ctx, &iam.GetPolicyDocumentArgs{
Statements: []iam.GetPolicyDocumentStatement{
{
Sid: pulumi.StringRef("AllowAppFlowDestinationActions"),
Effect: pulumi.StringRef("Allow"),
Principals: []iam.GetPolicyDocumentStatementPrincipal{
{
Type: "Service",
Identifiers: []string{
"appflow.amazonaws.com",
},
},
},
Actions: []string{
"s3:PutObject",
"s3:AbortMultipartUpload",
"s3:ListMultipartUploadParts",
"s3:ListBucketMultipartUploads",
"s3:GetBucketAcl",
"s3:PutObjectAcl",
},
Resources: []string{
"arn:aws:s3:::example_destination",
"arn:aws:s3:::example_destination/*",
},
},
},
}, nil)
if err != nil {
return err
}
exampleDestinationBucketPolicy, err := s3.NewBucketPolicy(ctx, "exampleDestinationBucketPolicy", &s3.BucketPolicyArgs{
Bucket: exampleDestinationBucketV2.ID(),
Policy: pulumi.String(exampleDestinationPolicyDocument.Json),
})
if err != nil {
return err
}
_, err = appflow.NewFlow(ctx, "exampleFlow", &appflow.FlowArgs{
SourceFlowConfig: &appflow.FlowSourceFlowConfigArgs{
ConnectorType: pulumi.String("S3"),
SourceConnectorProperties: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesArgs{
S3: &appflow.FlowSourceFlowConfigSourceConnectorPropertiesS3Args{
BucketName: exampleSourceBucketPolicy.Bucket,
BucketPrefix: pulumi.String("example"),
},
},
},
DestinationFlowConfigs: appflow.FlowDestinationFlowConfigArray{
&appflow.FlowDestinationFlowConfigArgs{
ConnectorType: pulumi.String("S3"),
DestinationConnectorProperties: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesArgs{
S3: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args{
BucketName: exampleDestinationBucketPolicy.Bucket,
S3OutputFormatConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs{
PrefixConfig: &appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs{
PrefixType: pulumi.String("PATH"),
},
},
},
},
},
},
Tasks: appflow.FlowTaskArray{
&appflow.FlowTaskArgs{
SourceFields: pulumi.StringArray{
pulumi.String("exampleField"),
},
DestinationField: pulumi.String("exampleField"),
TaskType: pulumi.String("Map"),
ConnectorOperators: appflow.FlowTaskConnectorOperatorArray{
&appflow.FlowTaskConnectorOperatorArgs{
S3: pulumi.String("NO_OP"),
},
},
},
},
TriggerConfig: &appflow.FlowTriggerConfigArgs{
TriggerType: pulumi.String("OnDemand"),
},
})
if err != nil {
return err
}
return nil
})
}
Java
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.s3.BucketV2;
import com.pulumi.aws.iam.IamFunctions;
import com.pulumi.aws.iam.inputs.GetPolicyDocumentArgs;
import com.pulumi.aws.iam.inputs.GetPolicyDocumentStatementArgs;
import com.pulumi.aws.iam.inputs.GetPolicyDocumentStatementPrincipalArgs;
import com.pulumi.aws.s3.BucketPolicy;
import com.pulumi.aws.s3.BucketPolicyArgs;
import com.pulumi.aws.s3.BucketObjectv2;
import com.pulumi.aws.s3.BucketObjectv2Args;
import com.pulumi.aws.appflow.Flow;
import com.pulumi.aws.appflow.FlowArgs;
import com.pulumi.aws.appflow.inputs.FlowSourceFlowConfigArgs;
import com.pulumi.aws.appflow.inputs.FlowSourceFlowConfigSourceConnectorPropertiesArgs;
import com.pulumi.aws.appflow.inputs.FlowSourceFlowConfigSourceConnectorPropertiesS3Args;
import com.pulumi.aws.appflow.inputs.FlowDestinationFlowConfigArgs;
import com.pulumi.aws.appflow.inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesArgs;
import com.pulumi.aws.appflow.inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args;
import com.pulumi.aws.appflow.inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs;
import com.pulumi.aws.appflow.inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs;
import com.pulumi.aws.appflow.inputs.FlowTaskArgs;
import com.pulumi.aws.appflow.inputs.FlowTaskConnectorOperatorArgs;
import com.pulumi.aws.appflow.inputs.FlowTriggerConfigArgs;
import com.pulumi.asset.FileAsset;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var exampleSourceBucketV2 = new BucketV2("exampleSourceBucketV2");
final var exampleSourcePolicyDocument = IamFunctions.getPolicyDocument(GetPolicyDocumentArgs.builder()
.statements(GetPolicyDocumentStatementArgs.builder()
.sid("AllowAppFlowSourceActions")
.effect("Allow")
.principals(GetPolicyDocumentStatementPrincipalArgs.builder()
.type("Service")
.identifiers("appflow.amazonaws.com")
.build())
.actions(
"s3:ListBucket",
"s3:GetObject")
.resources(
"arn:aws:s3:::example_source",
"arn:aws:s3:::example_source/*")
.build())
.build());
var exampleSourceBucketPolicy = new BucketPolicy("exampleSourceBucketPolicy", BucketPolicyArgs.builder()
.bucket(exampleSourceBucketV2.id())
.policy(exampleSourcePolicyDocument.applyValue(getPolicyDocumentResult -> getPolicyDocumentResult.json()))
.build());
var exampleBucketObjectv2 = new BucketObjectv2("exampleBucketObjectv2", BucketObjectv2Args.builder()
.bucket(exampleSourceBucketV2.id())
.key("example_source.csv")
.source(new FileAsset("example_source.csv"))
.build());
var exampleDestinationBucketV2 = new BucketV2("exampleDestinationBucketV2");
final var exampleDestinationPolicyDocument = IamFunctions.getPolicyDocument(GetPolicyDocumentArgs.builder()
.statements(GetPolicyDocumentStatementArgs.builder()
.sid("AllowAppFlowDestinationActions")
.effect("Allow")
.principals(GetPolicyDocumentStatementPrincipalArgs.builder()
.type("Service")
.identifiers("appflow.amazonaws.com")
.build())
.actions(
"s3:PutObject",
"s3:AbortMultipartUpload",
"s3:ListMultipartUploadParts",
"s3:ListBucketMultipartUploads",
"s3:GetBucketAcl",
"s3:PutObjectAcl")
.resources(
"arn:aws:s3:::example_destination",
"arn:aws:s3:::example_destination/*")
.build())
.build());
var exampleDestinationBucketPolicy = new BucketPolicy("exampleDestinationBucketPolicy", BucketPolicyArgs.builder()
.bucket(exampleDestinationBucketV2.id())
.policy(exampleDestinationPolicyDocument.applyValue(getPolicyDocumentResult -> getPolicyDocumentResult.json()))
.build());
var exampleFlow = new Flow("exampleFlow", FlowArgs.builder()
.sourceFlowConfig(FlowSourceFlowConfigArgs.builder()
.connectorType("S3")
.sourceConnectorProperties(FlowSourceFlowConfigSourceConnectorPropertiesArgs.builder()
.s3(FlowSourceFlowConfigSourceConnectorPropertiesS3Args.builder()
.bucketName(exampleSourceBucketPolicy.bucket())
.bucketPrefix("example")
.build())
.build())
.build())
.destinationFlowConfigs(FlowDestinationFlowConfigArgs.builder()
.connectorType("S3")
.destinationConnectorProperties(FlowDestinationFlowConfigDestinationConnectorPropertiesArgs.builder()
.s3(FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args.builder()
.bucketName(exampleDestinationBucketPolicy.bucket())
.s3OutputFormatConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs.builder()
.prefixConfig(FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs.builder()
.prefixType("PATH")
.build())
.build())
.build())
.build())
.build())
.tasks(FlowTaskArgs.builder()
.sourceFields("exampleField")
.destinationField("exampleField")
.taskType("Map")
.connectorOperators(FlowTaskConnectorOperatorArgs.builder()
.s3("NO_OP")
.build())
.build())
.triggerConfig(FlowTriggerConfigArgs.builder()
.triggerType("OnDemand")
.build())
.build());
}
}
Python
import pulumi
import pulumi_aws as aws
example_source_bucket_v2 = aws.s3.BucketV2("exampleSourceBucketV2")
example_source_policy_document = aws.iam.get_policy_document(statements=[aws.iam.GetPolicyDocumentStatementArgs(
sid="AllowAppFlowSourceActions",
effect="Allow",
principals=[aws.iam.GetPolicyDocumentStatementPrincipalArgs(
type="Service",
identifiers=["appflow.amazonaws.com"],
)],
actions=[
"s3:ListBucket",
"s3:GetObject",
],
resources=[
"arn:aws:s3:::example_source",
"arn:aws:s3:::example_source/*",
],
)])
example_source_bucket_policy = aws.s3.BucketPolicy("exampleSourceBucketPolicy",
bucket=example_source_bucket_v2.id,
policy=example_source_policy_document.json)
example_bucket_objectv2 = aws.s3.BucketObjectv2("exampleBucketObjectv2",
bucket=example_source_bucket_v2.id,
key="example_source.csv",
source=pulumi.FileAsset("example_source.csv"))
example_destination_bucket_v2 = aws.s3.BucketV2("exampleDestinationBucketV2")
example_destination_policy_document = aws.iam.get_policy_document(statements=[aws.iam.GetPolicyDocumentStatementArgs(
sid="AllowAppFlowDestinationActions",
effect="Allow",
principals=[aws.iam.GetPolicyDocumentStatementPrincipalArgs(
type="Service",
identifiers=["appflow.amazonaws.com"],
)],
actions=[
"s3:PutObject",
"s3:AbortMultipartUpload",
"s3:ListMultipartUploadParts",
"s3:ListBucketMultipartUploads",
"s3:GetBucketAcl",
"s3:PutObjectAcl",
],
resources=[
"arn:aws:s3:::example_destination",
"arn:aws:s3:::example_destination/*",
],
)])
example_destination_bucket_policy = aws.s3.BucketPolicy("exampleDestinationBucketPolicy",
bucket=example_destination_bucket_v2.id,
policy=example_destination_policy_document.json)
example_flow = aws.appflow.Flow("exampleFlow",
source_flow_config=aws.appflow.FlowSourceFlowConfigArgs(
connector_type="S3",
source_connector_properties=aws.appflow.FlowSourceFlowConfigSourceConnectorPropertiesArgs(
s3=aws.appflow.FlowSourceFlowConfigSourceConnectorPropertiesS3Args(
bucket_name=example_source_bucket_policy.bucket,
bucket_prefix="example",
),
),
),
destination_flow_configs=[aws.appflow.FlowDestinationFlowConfigArgs(
connector_type="S3",
destination_connector_properties=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesArgs(
s3=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args(
bucket_name=example_destination_bucket_policy.bucket,
s3_output_format_config=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs(
prefix_config=aws.appflow.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs(
prefix_type="PATH",
),
),
),
),
)],
tasks=[aws.appflow.FlowTaskArgs(
source_fields=["exampleField"],
destination_field="exampleField",
task_type="Map",
connector_operators=[aws.appflow.FlowTaskConnectorOperatorArgs(
s3="NO_OP",
)],
)],
trigger_config=aws.appflow.FlowTriggerConfigArgs(
trigger_type="OnDemand",
))
TypeScript
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
const exampleSourceBucketV2 = new aws.s3.BucketV2("exampleSourceBucketV2", {});
const exampleSourcePolicyDocument = aws.iam.getPolicyDocument({
statements: [{
sid: "AllowAppFlowSourceActions",
effect: "Allow",
principals: [{
type: "Service",
identifiers: ["appflow.amazonaws.com"],
}],
actions: [
"s3:ListBucket",
"s3:GetObject",
],
resources: [
"arn:aws:s3:::example_source",
"arn:aws:s3:::example_source/*",
],
}],
});
const exampleSourceBucketPolicy = new aws.s3.BucketPolicy("exampleSourceBucketPolicy", {
bucket: exampleSourceBucketV2.id,
policy: exampleSourcePolicyDocument.then(exampleSourcePolicyDocument => exampleSourcePolicyDocument.json),
});
const exampleBucketObjectv2 = new aws.s3.BucketObjectv2("exampleBucketObjectv2", {
bucket: exampleSourceBucketV2.id,
key: "example_source.csv",
source: new pulumi.asset.FileAsset("example_source.csv"),
});
const exampleDestinationBucketV2 = new aws.s3.BucketV2("exampleDestinationBucketV2", {});
const exampleDestinationPolicyDocument = aws.iam.getPolicyDocument({
statements: [{
sid: "AllowAppFlowDestinationActions",
effect: "Allow",
principals: [{
type: "Service",
identifiers: ["appflow.amazonaws.com"],
}],
actions: [
"s3:PutObject",
"s3:AbortMultipartUpload",
"s3:ListMultipartUploadParts",
"s3:ListBucketMultipartUploads",
"s3:GetBucketAcl",
"s3:PutObjectAcl",
],
resources: [
"arn:aws:s3:::example_destination",
"arn:aws:s3:::example_destination/*",
],
}],
});
const exampleDestinationBucketPolicy = new aws.s3.BucketPolicy("exampleDestinationBucketPolicy", {
bucket: exampleDestinationBucketV2.id,
policy: exampleDestinationPolicyDocument.then(exampleDestinationPolicyDocument => exampleDestinationPolicyDocument.json),
});
const exampleFlow = new aws.appflow.Flow("exampleFlow", {
sourceFlowConfig: {
connectorType: "S3",
sourceConnectorProperties: {
s3: {
bucketName: exampleSourceBucketPolicy.bucket,
bucketPrefix: "example",
},
},
},
destinationFlowConfigs: [{
connectorType: "S3",
destinationConnectorProperties: {
s3: {
bucketName: exampleDestinationBucketPolicy.bucket,
s3OutputFormatConfig: {
prefixConfig: {
prefixType: "PATH",
},
},
},
},
}],
tasks: [{
sourceFields: ["exampleField"],
destinationField: "exampleField",
taskType: "Map",
connectorOperators: [{
s3: "NO_OP",
}],
}],
triggerConfig: {
triggerType: "OnDemand",
},
});
YAML
resources:
exampleSourceBucketV2:
type: aws:s3:BucketV2
exampleSourceBucketPolicy:
type: aws:s3:BucketPolicy
properties:
bucket: ${exampleSourceBucketV2.id}
policy: ${exampleSourcePolicyDocument.json}
exampleBucketObjectv2:
type: aws:s3:BucketObjectv2
properties:
bucket: ${exampleSourceBucketV2.id}
key: example_source.csv
source:
fn::FileAsset: example_source.csv
exampleDestinationBucketV2:
type: aws:s3:BucketV2
exampleDestinationBucketPolicy:
type: aws:s3:BucketPolicy
properties:
bucket: ${exampleDestinationBucketV2.id}
policy: ${exampleDestinationPolicyDocument.json}
exampleFlow:
type: aws:appflow:Flow
properties:
sourceFlowConfig:
connectorType: S3
sourceConnectorProperties:
s3:
bucketName: ${exampleSourceBucketPolicy.bucket}
bucketPrefix: example
destinationFlowConfigs:
- connectorType: S3
destinationConnectorProperties:
s3:
bucketName: ${exampleDestinationBucketPolicy.bucket}
s3OutputFormatConfig:
prefixConfig:
prefixType: PATH
tasks:
- sourceFields:
- exampleField
destinationField: exampleField
taskType: Map
connectorOperators:
- s3: NO_OP
triggerConfig:
triggerType: OnDemand
variables:
exampleSourcePolicyDocument:
fn::invoke:
Function: aws:iam:getPolicyDocument
Arguments:
statements:
- sid: AllowAppFlowSourceActions
effect: Allow
principals:
- type: Service
identifiers:
- appflow.amazonaws.com
actions:
- s3:ListBucket
- s3:GetObject
resources:
- arn:aws:s3:::example_source
- arn:aws:s3:::example_source/*
exampleDestinationPolicyDocument:
fn::invoke:
Function: aws:iam:getPolicyDocument
Arguments:
statements:
- sid: AllowAppFlowDestinationActions
effect: Allow
principals:
- type: Service
identifiers:
- appflow.amazonaws.com
actions:
- s3:PutObject
- s3:AbortMultipartUpload
- s3:ListMultipartUploadParts
- s3:ListBucketMultipartUploads
- s3:GetBucketAcl
- s3:PutObjectAcl
resources:
- arn:aws:s3:::example_destination
- arn:aws:s3:::example_destination/*
Create Flow Resource
new Flow(name: string, args: FlowArgs, opts?: CustomResourceOptions);
@overload
def Flow(resource_name: str,
opts: Optional[ResourceOptions] = None,
description: Optional[str] = None,
destination_flow_configs: Optional[Sequence[FlowDestinationFlowConfigArgs]] = None,
kms_arn: Optional[str] = None,
name: Optional[str] = None,
source_flow_config: Optional[FlowSourceFlowConfigArgs] = None,
tags: Optional[Mapping[str, str]] = None,
tasks: Optional[Sequence[FlowTaskArgs]] = None,
trigger_config: Optional[FlowTriggerConfigArgs] = None)
@overload
def Flow(resource_name: str,
args: FlowArgs,
opts: Optional[ResourceOptions] = None)
func NewFlow(ctx *Context, name string, args FlowArgs, opts ...ResourceOption) (*Flow, error)
public Flow(string name, FlowArgs args, CustomResourceOptions? opts = null)
type: aws:appflow:Flow
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args FlowArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args FlowArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args FlowArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args FlowArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args FlowArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Flow Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
The Flow resource accepts the following input properties:
Property names in this and the following property listings use the C#/.NET casing; the other SDKs expose the same properties under their own conventions (for example, destinationFlowConfigs in TypeScript and Java, destination_flow_configs in Python, the DestinationFlowConfigs field on the Go args struct, and plain property maps in YAML).
- DestinationFlowConfigs List<FlowDestinationFlowConfig>
A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
- SourceFlowConfig FlowSourceFlowConfig
The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
- Tasks List<FlowTask>
A Task that Amazon AppFlow performs while transferring the data in the flow run.
- TriggerConfig FlowTriggerConfig
A Trigger that determines how and when the flow runs.
- Description string
Description of the flow you want to create.
- KmsArn string
ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
- Name string
Name of the flow.
- Tags Dictionary<string, string>
Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
Outputs
All input properties are implicitly available as output properties. Additionally, the Flow resource produces the following output properties:
- Arn string
Flow's ARN.
- Id string
The provider-assigned unique ID for this managed resource.
- TagsAll Dictionary<string, string>
Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
Look up Existing Flow Resource
Get an existing Flow resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: FlowState, opts?: CustomResourceOptions): Flow
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
arn: Optional[str] = None,
description: Optional[str] = None,
destination_flow_configs: Optional[Sequence[FlowDestinationFlowConfigArgs]] = None,
kms_arn: Optional[str] = None,
name: Optional[str] = None,
source_flow_config: Optional[FlowSourceFlowConfigArgs] = None,
tags: Optional[Mapping[str, str]] = None,
tags_all: Optional[Mapping[str, str]] = None,
tasks: Optional[Sequence[FlowTaskArgs]] = None,
trigger_config: Optional[FlowTriggerConfigArgs] = None) -> Flow
func GetFlow(ctx *Context, name string, id IDInput, state *FlowState, opts ...ResourceOption) (*Flow, error)
public static Flow Get(string name, Input<string> id, FlowState? state, CustomResourceOptions? opts = null)
public static Flow get(String name, Output<String> id, FlowState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
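As a minimal TypeScript sketch, looking up an existing flow might look like the following; the logical name and the ID are placeholders, and the ID is assumed to be the flow's ARN:
import * as aws from "@pulumi/aws";

// Adopt an existing AppFlow flow's state by its provider ID (assumed here to be the flow ARN).
const existingFlow = aws.appflow.Flow.get(
    "existingFlow",                                              // logical name within this program
    "arn:aws:appflow:us-east-1:123456789012:flow/example-flow",  // placeholder ID
);

// State properties of the looked-up resource are available as outputs.
export const existingFlowArn = existingFlow.arn;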
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- Arn string
Flow's ARN.
- Description string
Description of the flow you want to create.
- DestinationFlowConfigs List<FlowDestinationFlowConfig>
A Destination Flow Config that controls how Amazon AppFlow places data in the destination connector.
- KmsArn string
ARN (Amazon Resource Name) of the Key Management Service (KMS) key you provide for encryption. This is required if you do not want to use the Amazon AppFlow-managed KMS key. If you don't provide anything here, Amazon AppFlow uses the Amazon AppFlow-managed KMS key.
- Name string
Name of the flow.
- SourceFlowConfig FlowSourceFlowConfig
The Source Flow Config that controls how Amazon AppFlow retrieves data from the source connector.
- Tags Dictionary<string, string>
Key-value mapping of resource tags. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- TagsAll Dictionary<string, string>
Map of tags assigned to the resource, including those inherited from the provider default_tags configuration block. Deprecated: please use tags instead.
- Tasks List<FlowTask>
A Task that Amazon AppFlow performs while transferring the data in the flow run.
- TriggerConfig FlowTriggerConfig
A Trigger that determines how and when the flow runs.
Supporting Types
FlowDestinationFlowConfig, FlowDestinationFlowConfigArgs
- ConnectorType string
Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
- DestinationConnectorProperties FlowDestinationFlowConfigDestinationConnectorProperties
This stores the information that is required to query a particular connector. See Destination Connector Properties for more information.
- ApiVersion string
API version that the destination connector uses.
- ConnectorProfileName string
Name of the connector profile. This name must be unique for each connector profile in the AWS account.
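As a sketch of these fields (assuming a TypeScript program and the @pulumi/aws input type namespace), a destination flow config that targets a custom connector through a connector profile, rather than S3 as in the example above, might look like this; the profile name, API version, and entity details are hypothetical:
import * as aws from "@pulumi/aws";

// One entry for a flow's destinationFlowConfigs input. All specific values are placeholders.
const customConnectorDestination: aws.types.input.appflow.FlowDestinationFlowConfig = {
    connectorType: "CustomConnector",
    connectorProfileName: "example-connector-profile", // an existing connector profile (assumed)
    apiVersion: "v1",                                   // API version the destination connector uses (assumed)
    destinationConnectorProperties: {
        customConnector: {
            entityName: "Account",        // entity in the custom connector (see Custom Connector properties below)
            writeOperationType: "UPSERT", // INSERT, UPSERT, UPDATE, or DELETE
            idFieldNames: ["id"],         // field used as the record ID for the write operation
        },
    },
};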
FlowDestinationFlowConfigDestinationConnectorProperties, FlowDestinationFlowConfigDestinationConnectorPropertiesArgs
- CustomConnector FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnector
Properties that are required to query the custom Connector. See Custom Connector Destination Properties for more details.
- CustomerProfiles FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfiles
Properties that are required to query Amazon Connect Customer Profiles. See Customer Profiles Destination Properties for more details.
- EventBridge FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridge
Properties that are required to query Amazon EventBridge. See Generic Destination Properties for more details.
- Honeycode FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycode
Properties that are required to query Amazon Honeycode. See Generic Destination Properties for more details.
- LookoutMetrics FlowDestinationFlowConfigDestinationConnectorPropertiesLookoutMetrics
- Marketo FlowDestinationFlowConfigDestinationConnectorPropertiesMarketo
Properties that are required to query Marketo. See Generic Destination Properties for more details.
- Redshift FlowDestinationFlowConfigDestinationConnectorPropertiesRedshift
Properties that are required to query Amazon Redshift. See Redshift Destination Properties for more details.
- S3 FlowDestinationFlowConfigDestinationConnectorPropertiesS3
Properties that are required to query Amazon S3. See S3 Destination Properties for more details.
- Salesforce FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforce
Properties that are required to query Salesforce. See Salesforce Destination Properties for more details.
- SapoData FlowDestinationFlowConfigDestinationConnectorPropertiesSapoData
Properties that are required to query SAPOData. See SAPOData Destination Properties for more details.
- Snowflake FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflake
Properties that are required to query Snowflake. See Snowflake Destination Properties for more details.
- Upsolver FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolver
Properties that are required to query Upsolver. See Upsolver Destination Properties for more details.
- Zendesk FlowDestinationFlowConfigDestinationConnectorPropertiesZendesk
Properties that are required to query Zendesk. See Zendesk Destination Properties for more details.
FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnector, FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorArgs
Property names are shown in camelCase; each language SDK exposes them with its usual casing (for example, PascalCase in .NET and snake_case in Python).
- entityName (String) - Entity specified in the custom connector as a destination in the flow.
- customProperties (Map<String, String>) - Custom properties that are specific to the connector when it's used as a destination in the flow. Maximum of 50 items.
- errorHandlingConfig (Property Map) - Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See Error Handling Config for more details.
- idFieldNames (List<String>) - Names of the fields that Amazon AppFlow uses as an ID when performing a write operation such as update, delete, or upsert.
- writeOperationType (String) - Type of write operation to be performed in the custom connector when it's used as a destination. Valid values are INSERT, UPSERT, UPDATE, and DELETE.
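For orientation, here is a minimal C# sketch of this block as it might appear inside a flow's DestinationConnectorProperties; the entity name, custom property key, and ID field are hypothetical placeholders, not values required by any particular connector.
using Aws = Pulumi.Aws;

// Sketch of a custom connector destination block (illustrative values only).
var customConnectorDestination = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorArgs
{
    EntityName = "Account",            // entity exposed by the custom connector (hypothetical)
    CustomProperties =
    {
        { "apiVersion", "v1" },        // connector-specific key/value settings, up to 50 entries (hypothetical key)
    },
    IdFieldNames = new[] { "id" },     // field used as the record ID for UPSERT, UPDATE, or DELETE
    WriteOperationType = "UPSERT",     // one of INSERT, UPSERT, UPDATE, DELETE
};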
FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorErrorHandlingConfigArgs
- bucketName (String) - Name of the Amazon S3 bucket.
- bucketPrefix (String) - Amazon S3 bucket prefix.
- failOnFirstDestinationError (Boolean) - Whether the flow should fail after the first instance of a failure when attempting to place data in the destination.
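Every connector-specific Error Handling Config block in this resource carries the same three fields, so a single C# sketch covers them all; the bucket name and prefix below are hypothetical and assume the bucket already exists.
using Aws = Pulumi.Aws;

// Failed records are written to the named bucket/prefix; the flow stops on the first error.
var errorHandling = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesCustomConnectorErrorHandlingConfigArgs
{
    BucketName = "example-errors",        // hypothetical S3 bucket for error records
    BucketPrefix = "appflow/errors",      // hypothetical prefix
    FailOnFirstDestinationError = true,
};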
FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfiles, FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfilesArgs
- domainName (String) - Unique name of the Amazon Connect Customer Profiles domain.
- objectTypeName (String) - Object specified in the Amazon Connect Customer Profiles flow destination.
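A minimal C# sketch of the Customer Profiles destination block; both values are hypothetical and assume an existing Customer Profiles domain and object type.
using Aws = Pulumi.Aws;

// Writes flow output into an Amazon Connect Customer Profiles domain (illustrative values).
var customerProfilesDestination = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesCustomerProfilesArgs
{
    DomainName = "example-domain",        // hypothetical Customer Profiles domain
    ObjectTypeName = "CustomerProfile",   // hypothetical object type in that domain
};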
FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridge, FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeArgs
- object (String) - Object specified in the flow destination.
- errorHandlingConfig (Property Map) - Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See Error Handling Config for more details.
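The EventBridge block only takes an object plus optional error handling, and the Honeycode, Marketo, and Zendesk destination blocks below share the same shape. A hedged C# sketch, with a hypothetical partner event source name and error bucket:
using Aws = Pulumi.Aws;

// EventBridge destination: the object names the partner event source (illustrative value).
var eventBridgeDestination = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeArgs
{
    Object = "aws.partner/example/appflow-events",   // hypothetical partner event source
    ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeErrorHandlingConfigArgs
    {
        BucketName = "example-errors",               // hypothetical bucket for failed records
        FailOnFirstDestinationError = true,
    },
};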
FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesEventBridgeErrorHandlingConfigArgs
- bucketName (String) - Name of the Amazon S3 bucket.
- bucketPrefix (String) - Amazon S3 bucket prefix.
- failOnFirstDestinationError (Boolean) - Whether the flow should fail after the first instance of a failure when attempting to place data in the destination.
FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycode, FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeArgs
- object (String) - Object specified in the flow destination.
- errorHandlingConfig (Property Map) - Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See Error Handling Config for more details.
FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesHoneycodeErrorHandlingConfigArgs
- bucketName (String) - Name of the Amazon S3 bucket.
- bucketPrefix (String) - Amazon S3 bucket prefix.
- failOnFirstDestinationError (Boolean) - Whether the flow should fail after the first instance of a failure when attempting to place data in the destination.
FlowDestinationFlowConfigDestinationConnectorPropertiesMarketo, FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoArgs
- object (String) - Object specified in the flow destination.
- errorHandlingConfig (Property Map) - Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See Error Handling Config for more details.
FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesMarketoErrorHandlingConfigArgs
- bucketName (String) - Name of the Amazon S3 bucket.
- bucketPrefix (String) - Amazon S3 bucket prefix.
- failOnFirstDestinationError (Boolean) - Whether the flow should fail after the first instance of a failure when attempting to place data in the destination.
FlowDestinationFlowConfigDestinationConnectorPropertiesRedshift, FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftArgs
- intermediateBucketName (String) - Intermediate bucket that Amazon AppFlow uses when moving data into Amazon Redshift.
- object (String) - Object specified in the flow destination.
- bucketPrefix (String) - Object key for the bucket in which Amazon AppFlow places the destination files.
- errorHandlingConfig (Property Map) - Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See Error Handling Config for more details.
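A minimal C# sketch of the Redshift destination block; the intermediate bucket, table name, and error bucket are hypothetical placeholders and assume the Redshift connector profile is already configured.
using Aws = Pulumi.Aws;

// Redshift destination: data is staged in the intermediate S3 bucket before loading (illustrative values).
var redshiftDestination = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftArgs
{
    IntermediateBucketName = "example-intermediate",   // hypothetical staging bucket
    BucketPrefix = "staging",                          // hypothetical object key prefix
    Object = "public.example_table",                   // hypothetical Redshift object
    ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftErrorHandlingConfigArgs
    {
        BucketName = "example-errors",                 // hypothetical bucket for failed records
    },
};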
FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesRedshiftErrorHandlingConfigArgs
- bucketName (String) - Name of the Amazon S3 bucket.
- bucketPrefix (String) - Amazon S3 bucket prefix.
- failOnFirstDestinationError (Boolean) - Whether the flow should fail after the first instance of a failure when attempting to place data in the destination.
FlowDestinationFlowConfigDestinationConnectorPropertiesS3, FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args
- bucketName (String) - Amazon S3 bucket name in which Amazon AppFlow places the transferred data.
- bucketPrefix (String) - Object key for the bucket in which Amazon AppFlow places the destination files.
- s3OutputFormatConfig (Property Map) - Configuration that determines how Amazon AppFlow should format the flow output data when Amazon S3 is used as the destination. See S3 Output Format Config for more details.
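This mirrors the S3 destination used in the example usage above; the sketch below shows the same block on its own, with a hypothetical bucket name and prefix.
using Aws = Pulumi.Aws;

// S3 destination: output objects land under the given bucket and prefix (illustrative values).
var s3Destination = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3Args
{
    BucketName = "example_destination",   // hypothetical destination bucket
    BucketPrefix = "exports",             // hypothetical object key prefix
    S3OutputFormatConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs
    {
        PrefixConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs
        {
            PrefixType = "PATH",
        },
    },
};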
FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs
- aggregationConfig (Property Map) - Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
- fileType (String) - File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV, JSON, and PARQUET.
- prefixConfig (Property Map) - Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
- preserveSourceDataTyping (Boolean) - Whether the data types from the source system need to be preserved (only valid for the PARQUET file type).
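A C# sketch that exercises the full output format block, including the Aggregation Config and Prefix Config types documented just below; the chosen values are one valid combination, not a recommendation.
using Aws = Pulumi.Aws;

// Writes a single Parquet file per run, under day-granular path-and-filename prefixes.
var s3OutputFormat = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigArgs
{
    FileType = "PARQUET",                      // CSV, JSON, or PARQUET
    PreserveSourceDataTyping = true,           // only meaningful with PARQUET
    AggregationConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfigArgs
    {
        AggregationType = "SingleFile",        // None or SingleFile
    },
    PrefixConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs
    {
        PrefixType = "PATH_AND_FILENAME",      // FILENAME, PATH, or PATH_AND_FILENAME
        PrefixFormat = "DAY",                  // YEAR, MONTH, DAY, HOUR, or MINUTE
    },
};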
FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigAggregationConfigArgs
- aggregationType (String) - Whether Amazon AppFlow aggregates the flow records into a single file or leaves them unaggregated. Valid values are None and SingleFile.
FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesS3S3OutputFormatConfigPrefixConfigArgs
- prefixFormat (String) - Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
- prefixType (String) - Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforce, FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceArgs
- object (String) - Object specified in the flow destination.
- errorHandlingConfig (Property Map) - Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See Error Handling Config for more details.
- idFieldNames (List<String>) - Names of the fields that Amazon AppFlow uses as an ID when performing a write operation such as update, delete, or upsert.
- writeOperationType (String) - Type of write operation to be performed when Salesforce is used as the destination. Valid values are INSERT, UPSERT, UPDATE, and DELETE.
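A hedged C# sketch of a Salesforce destination performing an upsert; the object name, ID field, and error bucket are hypothetical and assume an existing Salesforce connector profile.
using Aws = Pulumi.Aws;

// Upserts records into a Salesforce object, keyed on the Id field (illustrative values).
var salesforceDestination = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceArgs
{
    Object = "Contact",                   // hypothetical Salesforce object
    IdFieldNames = new[] { "Id" },        // field used to match existing records
    WriteOperationType = "UPSERT",        // INSERT, UPSERT, UPDATE, or DELETE
    ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceErrorHandlingConfigArgs
    {
        BucketName = "example-errors",    // hypothetical bucket for failed records
        BucketPrefix = "salesforce",
    },
};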
FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesSalesforceErrorHandlingConfigArgs
- bucketName (String) - Name of the Amazon S3 bucket.
- bucketPrefix (String) - Amazon S3 bucket prefix.
- failOnFirstDestinationError (Boolean) - Whether the flow should fail after the first instance of a failure when attempting to place data in the destination.
FlowDestinationFlowConfigDestinationConnectorPropertiesSapoData, FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataArgs
- objectPath (String) - Object path specified in the SAPOData flow destination.
- errorHandlingConfig (Property Map) - Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See Error Handling Config for more details.
- idFieldNames (List<String>) - Names of the fields that Amazon AppFlow uses as an ID when performing a write operation such as update, delete, or upsert.
- successResponseHandlingConfig (Property Map) - Determines how Amazon AppFlow handles the success response that it gets from the connector after placing data. See Success Response Handling Config for more details.
- writeOperationType (String) - Type of write operation to be performed when SAPOData is used as the destination. Valid values are INSERT, UPSERT, UPDATE, and DELETE.
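A C# sketch of the SAPOData destination block, including the Success Response Handling Config documented below; the OData object path, key field, and bucket are hypothetical placeholders.
using Aws = Pulumi.Aws;

// Inserts records into an SAP OData entity and stores the connector's success responses in S3 (illustrative values).
var sapoDataDestination = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataArgs
{
    ObjectPath = "/sap/opu/odata/sap/EXAMPLE_SRV/Orders",   // hypothetical OData object path
    IdFieldNames = new[] { "OrderID" },                     // hypothetical key field
    WriteOperationType = "INSERT",                          // INSERT, UPSERT, UPDATE, or DELETE
    SuccessResponseHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfigArgs
    {
        BucketName = "example-responses",                   // hypothetical bucket for success responses
        BucketPrefix = "sapodata",
    },
};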
FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataErrorHandlingConfigArgs
- bucketName (String) - Name of the Amazon S3 bucket.
- bucketPrefix (String) - Amazon S3 bucket prefix.
- failOnFirstDestinationError (Boolean) - Whether the flow should fail after the first instance of a failure when attempting to place data in the destination.
FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesSapoDataSuccessResponseHandlingConfigArgs
- bucketName (String) - Name of the Amazon S3 bucket.
- bucketPrefix (String) - Amazon S3 bucket prefix.
FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflake, FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeArgs
- intermediateBucketName (String) - Intermediate bucket that Amazon AppFlow uses when moving data into Snowflake.
- object (String) - Object specified in the flow destination.
- bucketPrefix (String) - Object key for the bucket in which Amazon AppFlow places the destination files.
- errorHandlingConfig (Property Map) - Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See Error Handling Config for more details.
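A minimal C# sketch of the Snowflake destination block; the staging bucket and fully qualified object name are hypothetical and assume an existing Snowflake connector profile.
using Aws = Pulumi.Aws;

// Snowflake destination: data is staged in the intermediate S3 bucket before loading (illustrative values).
var snowflakeDestination = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeArgs
{
    IntermediateBucketName = "example-intermediate",   // hypothetical staging bucket
    Object = "EXAMPLE_DB.PUBLIC.ORDERS",               // hypothetical Snowflake object
    ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeErrorHandlingConfigArgs
    {
        BucketName = "example-errors",                 // hypothetical bucket for failed records
    },
};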
FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesSnowflakeErrorHandlingConfigArgs
- bucketName (String) - Name of the Amazon S3 bucket.
- bucketPrefix (String) - Amazon S3 bucket prefix.
- failOnFirstDestinationError (Boolean) - Whether the flow should fail after the first instance of a failure when attempting to place data in the destination.
FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolver, FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverArgs
- bucketName (String) - Amazon S3 bucket name in which Amazon AppFlow places the transferred data.
- s3OutputFormatConfig (Property Map) - Configuration that determines how Amazon AppFlow should format the flow output data when Amazon S3 is used as the destination. See S3 Output Format Config for more details.
- bucketPrefix (String) - Object key for the bucket in which Amazon AppFlow places the destination files.
FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigArgs
- PrefixConfig FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfig Determines the prefix that Amazon AppFlow applies to the folder name in the Amazon S3 bucket. You can name folders according to the flow frequency and date. See Prefix Config for more details.
- AggregationConfig FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfig Aggregation settings that you can use to customize the output format of your flow data. See Aggregation Config for more details.
- FileType string File type that Amazon AppFlow places in the Amazon S3 bucket. Valid values are CSV, JSON, and PARQUET.
FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfigArgs
- AggregationType string Whether Amazon AppFlow aggregates the flow records into a single file, or leaves them unaggregated. Valid values are None and SingleFile.
FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfigArgs
- PrefixType string Determines the format of the prefix, and whether it applies to the file name, file path, or both. Valid values are FILENAME, PATH, and PATH_AND_FILENAME.
- PrefixFormat string Determines the level of granularity that's included in the prefix. Valid values are YEAR, MONTH, DAY, HOUR, and MINUTE.
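A minimal sketch of how these Upsolver destination settings compose inside the Flow's destination flow configs; the bucket name, prefix, and format choices below are hypothetical placeholders, and the Upsolver property on the destination connector properties is assumed to follow the same pattern as the S3 property shown in the example at the top of this page:

var upsolverDestination = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigArgs
{
    ConnectorType = "Upsolver",
    DestinationConnectorProperties = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesArgs
    {
        Upsolver = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverArgs
        {
            BucketName = "upsolver-appflow-example", // hypothetical bucket name
            BucketPrefix = "example",
            S3OutputFormatConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigArgs
            {
                FileType = "PARQUET",
                PrefixConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigPrefixConfigArgs
                {
                    PrefixType = "PATH_AND_FILENAME",
                    PrefixFormat = "DAY",
                },
                AggregationConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesUpsolverS3OutputFormatConfigAggregationConfigArgs
                {
                    AggregationType = "SingleFile",
                },
            },
        },
    },
};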
FlowDestinationFlowConfigDestinationConnectorPropertiesZendesk, FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskArgs
- Object string Object specified in the flow destination.
- ErrorHandlingConfig FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskErrorHandlingConfig Settings that determine how Amazon AppFlow handles an error when placing data in the destination. See Error Handling Config for more details.
- IdFieldNames List<string> Name of the field that Amazon AppFlow uses as an ID when performing a write operation such as update, delete, or upsert.
- WriteOperationType string Type of write operation to be performed in the connector when it's used as a destination. Valid values are INSERT, UPSERT, UPDATE, and DELETE.
FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskErrorHandlingConfig, FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskErrorHandlingConfigArgs
- BucketName string Name of the Amazon S3 bucket.
- BucketPrefix string Amazon S3 bucket prefix.
- FailOnFirstDestinationError bool If the flow should fail after the first instance of a failure when attempting to place data in the destination.
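A minimal sketch of a Zendesk destination that upserts by a record ID field and routes failed records to an S3 error location; the connector profile, object, ID field, and bucket below are hypothetical placeholders, and the Zendesk property on the destination connector properties is assumed to follow the same pattern as the S3 property shown earlier:

var zendeskDestination = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigArgs
{
    ConnectorType = "Zendesk",
    ConnectorProfileName = "example-zendesk-profile",  // hypothetical connector profile
    DestinationConnectorProperties = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesArgs
    {
        Zendesk = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskArgs
        {
            Object = "tickets",                        // hypothetical Zendesk object
            IdFieldNames = new[] { "id" },             // hypothetical ID field
            WriteOperationType = "UPSERT",
            ErrorHandlingConfig = new Aws.AppFlow.Inputs.FlowDestinationFlowConfigDestinationConnectorPropertiesZendeskErrorHandlingConfigArgs
            {
                BucketName = "example-error-bucket",   // hypothetical bucket for failed records
                BucketPrefix = "zendesk-errors",
                FailOnFirstDestinationError = false,
            },
        },
    },
};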
FlowSourceFlowConfig, FlowSourceFlowConfigArgs
- ConnectorType string Type of connector, such as Salesforce, Amplitude, and so on. Valid values are Salesforce, Singular, Slack, Redshift, S3, Marketo, Googleanalytics, Zendesk, Servicenow, Datadog, Trendmicro, Snowflake, Dynatrace, Infornexus, Amplitude, Veeva, EventBridge, LookoutMetrics, Upsolver, Honeycode, CustomerProfiles, SAPOData, and CustomConnector.
- SourceConnectorProperties FlowSourceFlowConfigSourceConnectorProperties Information that is required to query a particular source connector. See Source Connector Properties for details.
- ApiVersion string API version that the connector uses.
- ConnectorProfileName string Name of the connector profile. This name must be unique for each connector profile in the AWS account.
- IncrementalPullConfig FlowSourceFlowConfigIncrementalPullConfig Defines the configuration for a scheduled incremental data pull. If a valid configuration is provided, the fields specified in the configuration are used when querying for the incremental data pull. See Incremental Pull Config for more details.
FlowSourceFlowConfigIncrementalPullConfig, FlowSourceFlowConfigIncrementalPullConfigArgs
- DatetimeTypeFieldName string Field that specifies the date time or timestamp field as the criteria to use when importing incremental records from the source.
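A hedged sketch of a source flow config that pairs a named connector profile with an incremental pull configuration (incremental pull is intended for scheduled flows); the profile name, Marketo object, and timestamp field are hypothetical placeholders:

var marketoSource = new Aws.AppFlow.Inputs.FlowSourceFlowConfigArgs
{
    ConnectorType = "Marketo",
    ConnectorProfileName = "example-marketo-profile",  // hypothetical connector profile
    IncrementalPullConfig = new Aws.AppFlow.Inputs.FlowSourceFlowConfigIncrementalPullConfigArgs
    {
        DatetimeTypeFieldName = "updatedAt",            // hypothetical timestamp field on the source object
    },
    SourceConnectorProperties = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesArgs
    {
        Marketo = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesMarketoArgs
        {
            Object = "leads",                           // hypothetical Marketo object
        },
    },
};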
FlowSourceFlowConfigSourceConnectorProperties, FlowSourceFlowConfigSourceConnectorPropertiesArgs
- Amplitude FlowSourceFlowConfigSourceConnectorPropertiesAmplitude Information that is required for querying Amplitude. See Generic Source Properties for more details.
- CustomConnector FlowSourceFlowConfigSourceConnectorPropertiesCustomConnector Properties that are applied when the custom connector is being used as a source. See Custom Connector Source Properties.
- Datadog FlowSourceFlowConfigSourceConnectorPropertiesDatadog Information that is required for querying Datadog. See Generic Source Properties for more details.
- Dynatrace FlowSourceFlowConfigSourceConnectorPropertiesDynatrace Information that is required for querying Dynatrace. See Generic Source Properties for more details.
- GoogleAnalytics FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalytics Information that is required for querying Google Analytics. See Generic Source Properties for more details.
- InforNexus FlowSourceFlowConfigSourceConnectorPropertiesInforNexus Information that is required for querying Infor Nexus. See Generic Source Properties for more details.
- Marketo FlowSourceFlowConfigSourceConnectorPropertiesMarketo Information that is required for querying Marketo. See Generic Source Properties for more details.
- S3 FlowSourceFlowConfigSourceConnectorPropertiesS3 Information that is required for querying Amazon S3. See S3 Source Properties for more details.
- Salesforce FlowSourceFlowConfigSourceConnectorPropertiesSalesforce Information that is required for querying Salesforce. See Salesforce Source Properties for more details.
- SapoData FlowSourceFlowConfigSourceConnectorPropertiesSapoData Information that is required for querying SAPOData as a flow source. See SAPO Source Properties for more details.
- ServiceNow FlowSourceFlowConfigSourceConnectorPropertiesServiceNow Information that is required for querying ServiceNow. See Generic Source Properties for more details.
- Singular FlowSourceFlowConfigSourceConnectorPropertiesSingular Information that is required for querying Singular. See Generic Source Properties for more details.
- Slack FlowSourceFlowConfigSourceConnectorPropertiesSlack Information that is required for querying Slack. See Generic Source Properties for more details.
- Trendmicro FlowSourceFlowConfigSourceConnectorPropertiesTrendmicro Information that is required for querying Trend Micro. See Generic Source Properties for more details.
- Veeva FlowSourceFlowConfigSourceConnectorPropertiesVeeva Information that is required for querying Veeva. See Veeva Source Properties for more details.
- Zendesk FlowSourceFlowConfigSourceConnectorPropertiesZendesk Information that is required for querying Zendesk. See Generic Source Properties for more details.
FlowSourceFlowConfigSourceConnectorPropertiesAmplitude, FlowSourceFlowConfigSourceConnectorPropertiesAmplitudeArgs
- Object string Object specified in the flow source.
FlowSourceFlowConfigSourceConnectorPropertiesCustomConnector, FlowSourceFlowConfigSourceConnectorPropertiesCustomConnectorArgs
- EntityName string Entity specified in the custom connector as a source in the flow.
- CustomProperties Dictionary<string, string> Custom properties that are specific to the connector when it's used as a source in the flow. Maximum of 50 items.
FlowSourceFlowConfigSourceConnectorPropertiesDatadog, FlowSourceFlowConfigSourceConnectorPropertiesDatadogArgs
- Object string Object specified in the flow source.
FlowSourceFlowConfigSourceConnectorPropertiesDynatrace, FlowSourceFlowConfigSourceConnectorPropertiesDynatraceArgs
- Object string Object specified in the flow source.
FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalytics, FlowSourceFlowConfigSourceConnectorPropertiesGoogleAnalyticsArgs
- Object string Object specified in the flow source.
FlowSourceFlowConfigSourceConnectorPropertiesInforNexus, FlowSourceFlowConfigSourceConnectorPropertiesInforNexusArgs
- Object string Object specified in the flow source.
FlowSourceFlowConfigSourceConnectorPropertiesMarketo, FlowSourceFlowConfigSourceConnectorPropertiesMarketoArgs
- Object string Object specified in the flow source.
FlowSourceFlowConfigSourceConnectorPropertiesS3, FlowSourceFlowConfigSourceConnectorPropertiesS3Args
- BucketName string Amazon S3 bucket name in which Amazon AppFlow places the transferred data.
- BucketPrefix string Object key for the bucket in which Amazon AppFlow places the destination files.
- S3InputFormatConfig FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfig Configuration of the format in which you provide the flow input data when Amazon S3 is used as the source. See S3 Input Format Config for details.
FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfig, FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfigArgs
- S3InputFileType string File type that Amazon AppFlow gets from your Amazon S3 bucket. Valid values are CSV and JSON.
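Building on the S3 source pattern from the example at the top of this page, a short sketch of the input format config in context; the bucket name and prefix are hypothetical placeholders:

var s3SourceProperties = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesArgs
{
    S3 = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesS3Args
    {
        BucketName = "example-source-bucket",  // hypothetical bucket
        BucketPrefix = "example",
        S3InputFormatConfig = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesS3S3InputFormatConfigArgs
        {
            S3InputFileType = "CSV",           // CSV or JSON
        },
    },
};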
FlowSourceFlowConfigSourceConnectorPropertiesSalesforce, FlowSourceFlowConfigSourceConnectorPropertiesSalesforceArgs
- Object string Object specified in the flow source.
- EnableDynamicFieldUpdate bool Flag that enables dynamic fetching of new (recently added) fields in the Salesforce objects while running a flow.
- IncludeDeletedRecords bool Whether Amazon AppFlow includes deleted records in the flow run.
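A minimal sketch of the Salesforce source properties in use; the object name is a hypothetical placeholder, and the Salesforce property on the source connector properties is assumed to mirror the S3 property shown in the example at the top of this page:

var salesforceSourceProperties = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesArgs
{
    Salesforce = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesSalesforceArgs
    {
        Object = "Account",               // hypothetical Salesforce object
        EnableDynamicFieldUpdate = true,  // pick up newly added fields on each run
        IncludeDeletedRecords = false,
    },
};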
FlowSourceFlowConfigSourceConnectorPropertiesSapoData, FlowSourceFlowConfigSourceConnectorPropertiesSapoDataArgs
- ObjectPath string Object path specified in the SAPOData flow source.
FlowSourceFlowConfigSourceConnectorPropertiesServiceNow, FlowSourceFlowConfigSourceConnectorPropertiesServiceNowArgs
- Object string Object specified in the flow source.
FlowSourceFlowConfigSourceConnectorPropertiesSingular, FlowSourceFlowConfigSourceConnectorPropertiesSingularArgs
- Object string Object specified in the flow source.
FlowSourceFlowConfigSourceConnectorPropertiesSlack, FlowSourceFlowConfigSourceConnectorPropertiesSlackArgs
- Object string Object specified in the flow source.
FlowSourceFlowConfigSourceConnectorPropertiesTrendmicro, FlowSourceFlowConfigSourceConnectorPropertiesTrendmicroArgs
- Object string Object specified in the flow source.
FlowSourceFlowConfigSourceConnectorPropertiesVeeva, FlowSourceFlowConfigSourceConnectorPropertiesVeevaArgs
- Object string Object specified in the flow source.
- DocumentType string Document type specified in the Veeva document extract flow.
- IncludeAllVersions bool Boolean value to include all versions of files in the Veeva document extract flow.
- IncludeRenditions bool Boolean value to include file renditions in the Veeva document extract flow.
- IncludeSourceFiles bool Boolean value to include source files in the Veeva document extract flow.
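A minimal sketch of the Veeva document-extract options in use; the object and document type are hypothetical placeholders, and the Veeva property on the source connector properties is assumed to mirror the S3 property shown in the example at the top of this page:

var veevaSourceProperties = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesArgs
{
    Veeva = new Aws.AppFlow.Inputs.FlowSourceFlowConfigSourceConnectorPropertiesVeevaArgs
    {
        Object = "documents",          // hypothetical Veeva object
        DocumentType = "Promotional",  // hypothetical document type
        IncludeAllVersions = false,
        IncludeRenditions = false,
        IncludeSourceFiles = true,
    },
};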
FlowSourceFlowConfigSourceConnectorPropertiesZendesk, FlowSourceFlowConfigSourceConnectorPropertiesZendeskArgs
- Object string Object specified in the flow source.
FlowTask, FlowTaskArgs
C#:
- SourceFields (List<string>): Source fields to which a particular task is applied.
- TaskType (string): Particular task implementation that Amazon AppFlow performs. Valid values are Arithmetic, Filter, Map, Map_all, Mask, Merge, Passthrough, Truncate, and Validate.
- ConnectorOperators (List<FlowTaskConnectorOperator>): Operation to be performed on the provided source fields. See Connector Operator for details.
- DestinationField (string): Field in a destination connector, or a field value against which Amazon AppFlow validates a source field.
- TaskProperties (Dictionary<string, string>): Map used to store task-related information. The execution service looks for particular information based on the TaskType. Valid keys are VALUE, VALUES, DATA_TYPE, UPPER_BOUND, LOWER_BOUND, SOURCE_DATA_TYPE, DESTINATION_DATA_TYPE, VALIDATION_ACTION, MASK_VALUE, MASK_LENGTH, TRUNCATE_LENGTH, MATH_OPERATION_FIELDS_ORDER, CONCAT_FORMAT, SUBFIELD_CATEGORY_MAP, and EXCLUDE_SOURCE_FIELDS_LIST.
Go:
- SourceFields ([]string): Source fields to which a particular task is applied.
- TaskType (string): Particular task implementation that Amazon AppFlow performs. Valid values are Arithmetic, Filter, Map, Map_all, Mask, Merge, Passthrough, Truncate, and Validate.
- ConnectorOperators ([]FlowTaskConnectorOperator): Operation to be performed on the provided source fields. See Connector Operator for details.
- DestinationField (string): Field in a destination connector, or a field value against which Amazon AppFlow validates a source field.
- TaskProperties (map[string]string): Map used to store task-related information. The execution service looks for particular information based on the TaskType. Valid keys are VALUE, VALUES, DATA_TYPE, UPPER_BOUND, LOWER_BOUND, SOURCE_DATA_TYPE, DESTINATION_DATA_TYPE, VALIDATION_ACTION, MASK_VALUE, MASK_LENGTH, TRUNCATE_LENGTH, MATH_OPERATION_FIELDS_ORDER, CONCAT_FORMAT, SUBFIELD_CATEGORY_MAP, and EXCLUDE_SOURCE_FIELDS_LIST.
Java:
- sourceFields (List<String>): Source fields to which a particular task is applied.
- taskType (String): Particular task implementation that Amazon AppFlow performs. Valid values are Arithmetic, Filter, Map, Map_all, Mask, Merge, Passthrough, Truncate, and Validate.
- connectorOperators (List<FlowTaskConnectorOperator>): Operation to be performed on the provided source fields. See Connector Operator for details.
- destinationField (String): Field in a destination connector, or a field value against which Amazon AppFlow validates a source field.
- taskProperties (Map<String,String>): Map used to store task-related information. The execution service looks for particular information based on the TaskType. Valid keys are VALUE, VALUES, DATA_TYPE, UPPER_BOUND, LOWER_BOUND, SOURCE_DATA_TYPE, DESTINATION_DATA_TYPE, VALIDATION_ACTION, MASK_VALUE, MASK_LENGTH, TRUNCATE_LENGTH, MATH_OPERATION_FIELDS_ORDER, CONCAT_FORMAT, SUBFIELD_CATEGORY_MAP, and EXCLUDE_SOURCE_FIELDS_LIST.
TypeScript:
- sourceFields (string[]): Source fields to which a particular task is applied.
- taskType (string): Particular task implementation that Amazon AppFlow performs. Valid values are Arithmetic, Filter, Map, Map_all, Mask, Merge, Passthrough, Truncate, and Validate.
- connectorOperators (FlowTaskConnectorOperator[]): Operation to be performed on the provided source fields. See Connector Operator for details.
- destinationField (string): Field in a destination connector, or a field value against which Amazon AppFlow validates a source field.
- taskProperties ({[key: string]: string}): Map used to store task-related information. The execution service looks for particular information based on the TaskType. Valid keys are VALUE, VALUES, DATA_TYPE, UPPER_BOUND, LOWER_BOUND, SOURCE_DATA_TYPE, DESTINATION_DATA_TYPE, VALIDATION_ACTION, MASK_VALUE, MASK_LENGTH, TRUNCATE_LENGTH, MATH_OPERATION_FIELDS_ORDER, CONCAT_FORMAT, SUBFIELD_CATEGORY_MAP, and EXCLUDE_SOURCE_FIELDS_LIST.
Python:
- source_fields (Sequence[str]): Source fields to which a particular task is applied.
- task_type (str): Particular task implementation that Amazon AppFlow performs. Valid values are Arithmetic, Filter, Map, Map_all, Mask, Merge, Passthrough, Truncate, and Validate.
- connector_operators (Sequence[FlowTaskConnectorOperator]): Operation to be performed on the provided source fields. See Connector Operator for details.
- destination_field (str): Field in a destination connector, or a field value against which Amazon AppFlow validates a source field.
- task_properties (Mapping[str, str]): Map used to store task-related information. The execution service looks for particular information based on the TaskType. Valid keys are VALUE, VALUES, DATA_TYPE, UPPER_BOUND, LOWER_BOUND, SOURCE_DATA_TYPE, DESTINATION_DATA_TYPE, VALIDATION_ACTION, MASK_VALUE, MASK_LENGTH, TRUNCATE_LENGTH, MATH_OPERATION_FIELDS_ORDER, CONCAT_FORMAT, SUBFIELD_CATEGORY_MAP, and EXCLUDE_SOURCE_FIELDS_LIST.
YAML:
- sourceFields (List<String>): Source fields to which a particular task is applied.
- taskType (String): Particular task implementation that Amazon AppFlow performs. Valid values are Arithmetic, Filter, Map, Map_all, Mask, Merge, Passthrough, Truncate, and Validate.
- connectorOperators (List<Property Map>): Operation to be performed on the provided source fields. See Connector Operator for details.
- destinationField (String): Field in a destination connector, or a field value against which Amazon AppFlow validates a source field.
- taskProperties (Map<String>): Map used to store task-related information. The execution service looks for particular information based on the TaskType. Valid keys are VALUE, VALUES, DATA_TYPE, UPPER_BOUND, LOWER_BOUND, SOURCE_DATA_TYPE, DESTINATION_DATA_TYPE, VALIDATION_ACTION, MASK_VALUE, MASK_LENGTH, TRUNCATE_LENGTH, MATH_OPERATION_FIELDS_ORDER, CONCAT_FORMAT, SUBFIELD_CATEGORY_MAP, and EXCLUDE_SOURCE_FIELDS_LIST.
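For context on how TaskType and TaskProperties work together, the following is a hedged C# sketch of a Mask task. The field name, mask character, and length are placeholders, and pairing MASK_VALUE and MASK_LENGTH with the Mask task type is an assumption based on the key names above; treat it as a sketch rather than a verified recipe.
// Hypothetical Mask task over an S3-sourced field; field name and mask settings are placeholders.
var maskTask = new Aws.AppFlow.Inputs.FlowTaskArgs
{
    TaskType = "Mask",
    SourceFields = new[] { "accountNumber" },
    DestinationField = "accountNumber",
    ConnectorOperators = new[]
    {
        // MASK_FIRST_N is listed as a valid operator for Amazon S3 sources below.
        new Aws.AppFlow.Inputs.FlowTaskConnectorOperatorArgs { S3 = "MASK_FIRST_N" },
    },
    TaskProperties =
    {
        { "MASK_VALUE", "*" },
        { "MASK_LENGTH", "5" },
    },
};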
FlowTaskConnectorOperator, FlowTaskConnectorOperatorArgs
C#:
- Amplitude (string): Operation to be performed on the provided Amplitude source fields. The only valid value is BETWEEN.
- CustomConnector (string): Operators supported by the custom connector. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Datadog (string): Operation to be performed on the provided Datadog source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Dynatrace (string): Operation to be performed on the provided Dynatrace source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- GoogleAnalytics (string): Operation to be performed on the provided Google Analytics source fields. Valid values are PROJECTION and BETWEEN.
- InforNexus (string): Operation to be performed on the provided Infor Nexus source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Marketo (string): Operation to be performed on the provided Marketo source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- S3 (string): Operation to be performed on the provided Amazon S3 source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Salesforce (string): Operation to be performed on the provided Salesforce source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- SapoData (string): Operation to be performed on the provided SAPOData source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- ServiceNow (string): Operation to be performed on the provided ServiceNow source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Singular (string): Operation to be performed on the provided Singular source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Slack (string): Operation to be performed on the provided Slack source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Trendmicro (string): Operation to be performed on the provided Trend Micro source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Veeva (string): Operation to be performed on the provided Veeva source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Zendesk (string): Operation to be performed on the provided Zendesk source fields. Valid values are PROJECTION, GREATER_THAN, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
Go:
- Amplitude (string): Operation to be performed on the provided Amplitude source fields. The only valid value is BETWEEN.
- CustomConnector (string): Operators supported by the custom connector. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Datadog (string): Operation to be performed on the provided Datadog source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Dynatrace (string): Operation to be performed on the provided Dynatrace source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- GoogleAnalytics (string): Operation to be performed on the provided Google Analytics source fields. Valid values are PROJECTION and BETWEEN.
- InforNexus (string): Operation to be performed on the provided Infor Nexus source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Marketo (string): Operation to be performed on the provided Marketo source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- S3 (string): Operation to be performed on the provided Amazon S3 source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Salesforce (string): Operation to be performed on the provided Salesforce source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- SapoData (string): Operation to be performed on the provided SAPOData source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- ServiceNow (string): Operation to be performed on the provided ServiceNow source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Singular (string): Operation to be performed on the provided Singular source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Slack (string): Operation to be performed on the provided Slack source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Trendmicro (string): Operation to be performed on the provided Trend Micro source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Veeva (string): Operation to be performed on the provided Veeva source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- Zendesk (string): Operation to be performed on the provided Zendesk source fields. Valid values are PROJECTION, GREATER_THAN, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
Java:
- amplitude (String): Operation to be performed on the provided Amplitude source fields. The only valid value is BETWEEN.
- customConnector (String): Operators supported by the custom connector. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- datadog (String): Operation to be performed on the provided Datadog source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- dynatrace (String): Operation to be performed on the provided Dynatrace source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- googleAnalytics (String): Operation to be performed on the provided Google Analytics source fields. Valid values are PROJECTION and BETWEEN.
- inforNexus (String): Operation to be performed on the provided Infor Nexus source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- marketo (String): Operation to be performed on the provided Marketo source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- s3 (String): Operation to be performed on the provided Amazon S3 source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- salesforce (String): Operation to be performed on the provided Salesforce source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- sapoData (String): Operation to be performed on the provided SAPOData source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- serviceNow (String): Operation to be performed on the provided ServiceNow source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- singular (String): Operation to be performed on the provided Singular source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- slack (String): Operation to be performed on the provided Slack source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- trendmicro (String): Operation to be performed on the provided Trend Micro source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- veeva (String): Operation to be performed on the provided Veeva source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- zendesk (String): Operation to be performed on the provided Zendesk source fields. Valid values are PROJECTION, GREATER_THAN, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
TypeScript:
- amplitude (string): Operation to be performed on the provided Amplitude source fields. The only valid value is BETWEEN.
- customConnector (string): Operators supported by the custom connector. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- datadog (string): Operation to be performed on the provided Datadog source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- dynatrace (string): Operation to be performed on the provided Dynatrace source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- googleAnalytics (string): Operation to be performed on the provided Google Analytics source fields. Valid values are PROJECTION and BETWEEN.
- inforNexus (string): Operation to be performed on the provided Infor Nexus source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- marketo (string): Operation to be performed on the provided Marketo source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- s3 (string): Operation to be performed on the provided Amazon S3 source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- salesforce (string): Operation to be performed on the provided Salesforce source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- sapoData (string): Operation to be performed on the provided SAPOData source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- serviceNow (string): Operation to be performed on the provided ServiceNow source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- singular (string): Operation to be performed on the provided Singular source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- slack (string): Operation to be performed on the provided Slack source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- trendmicro (string): Operation to be performed on the provided Trend Micro source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- veeva (string): Operation to be performed on the provided Veeva source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- zendesk (string): Operation to be performed on the provided Zendesk source fields. Valid values are PROJECTION, GREATER_THAN, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
Python:
- amplitude (str): Operation to be performed on the provided Amplitude source fields. The only valid value is BETWEEN.
- custom_connector (str): Operators supported by the custom connector. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- datadog (str): Operation to be performed on the provided Datadog source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- dynatrace (str): Operation to be performed on the provided Dynatrace source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- google_analytics (str): Operation to be performed on the provided Google Analytics source fields. Valid values are PROJECTION and BETWEEN.
- infor_nexus (str): Operation to be performed on the provided Infor Nexus source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- marketo (str): Operation to be performed on the provided Marketo source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- s3 (str): Operation to be performed on the provided Amazon S3 source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- salesforce (str): Operation to be performed on the provided Salesforce source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- sapo_data (str): Operation to be performed on the provided SAPOData source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- service_now (str): Operation to be performed on the provided ServiceNow source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- singular (str): Operation to be performed on the provided Singular source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- slack (str): Operation to be performed on the provided Slack source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- trendmicro (str): Operation to be performed on the provided Trend Micro source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- veeva (str): Operation to be performed on the provided Veeva source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- zendesk (str): Operation to be performed on the provided Zendesk source fields. Valid values are PROJECTION, GREATER_THAN, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
YAML:
- amplitude (String): Operation to be performed on the provided Amplitude source fields. The only valid value is BETWEEN.
- customConnector (String): Operators supported by the custom connector. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- datadog (String): Operation to be performed on the provided Datadog source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- dynatrace (String): Operation to be performed on the provided Dynatrace source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- googleAnalytics (String): Operation to be performed on the provided Google Analytics source fields. Valid values are PROJECTION and BETWEEN.
- inforNexus (String): Operation to be performed on the provided Infor Nexus source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- marketo (String): Operation to be performed on the provided Marketo source fields. Valid values are PROJECTION, BETWEEN, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- s3 (String): Operation to be performed on the provided Amazon S3 source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- salesforce (String): Operation to be performed on the provided Salesforce source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- sapoData (String): Operation to be performed on the provided SAPOData source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- serviceNow (String): Operation to be performed on the provided ServiceNow source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- singular (String): Operation to be performed on the provided Singular source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- slack (String): Operation to be performed on the provided Slack source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- trendmicro (String): Operation to be performed on the provided Trend Micro source fields. Valid values are PROJECTION, EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- veeva (String): Operation to be performed on the provided Veeva source fields. Valid values are PROJECTION, LESS_THAN, GREATER_THAN, CONTAINS, BETWEEN, LESS_THAN_OR_EQUAL_TO, GREATER_THAN_OR_EQUAL_TO, EQUAL_TO, NOT_EQUAL_TO, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
- zendesk (String): Operation to be performed on the provided Zendesk source fields. Valid values are PROJECTION, GREATER_THAN, ADDITION, MULTIPLICATION, DIVISION, SUBTRACTION, MASK_ALL, MASK_FIRST_N, MASK_LAST_N, VALIDATE_NON_NULL, VALIDATE_NON_ZERO, VALIDATE_NON_NEGATIVE, VALIDATE_NUMERIC, and NO_OP.
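Only the operator property that matches the flow's source connector type is set on a given task. Below is a hedged C# sketch that uses the S3 operator, in keeping with the S3-to-S3 example flow earlier on this page; the field names are placeholders.
// Hypothetical projection task: limits an S3-sourced flow to two columns.
var projectionTask = new Aws.AppFlow.Inputs.FlowTaskArgs
{
    TaskType = "Filter",
    SourceFields = new[]
    {
        "exampleField",
        "anotherField",
    },
    ConnectorOperators = new[]
    {
        // Set only the property matching the source connector; here the flow reads from S3.
        new Aws.AppFlow.Inputs.FlowTaskConnectorOperatorArgs { S3 = "PROJECTION" },
    },
};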
FlowTriggerConfig, FlowTriggerConfigArgs
C#:
- TriggerType (string): Type of flow trigger. Valid values are Scheduled, Event, and OnDemand.
- TriggerProperties (FlowTriggerConfigTriggerProperties): Configuration details of a schedule-triggered flow as defined by the user. Currently, these settings only apply to the Scheduled trigger type. See Scheduled Trigger Properties for details.
Go:
- TriggerType (string): Type of flow trigger. Valid values are Scheduled, Event, and OnDemand.
- TriggerProperties (FlowTriggerConfigTriggerProperties): Configuration details of a schedule-triggered flow as defined by the user. Currently, these settings only apply to the Scheduled trigger type. See Scheduled Trigger Properties for details.
Java:
- triggerType (String): Type of flow trigger. Valid values are Scheduled, Event, and OnDemand.
- triggerProperties (FlowTriggerConfigTriggerProperties): Configuration details of a schedule-triggered flow as defined by the user. Currently, these settings only apply to the Scheduled trigger type. See Scheduled Trigger Properties for details.
TypeScript:
- triggerType (string): Type of flow trigger. Valid values are Scheduled, Event, and OnDemand.
- triggerProperties (FlowTriggerConfigTriggerProperties): Configuration details of a schedule-triggered flow as defined by the user. Currently, these settings only apply to the Scheduled trigger type. See Scheduled Trigger Properties for details.
Python:
- trigger_type (str): Type of flow trigger. Valid values are Scheduled, Event, and OnDemand.
- trigger_properties (FlowTriggerConfigTriggerProperties): Configuration details of a schedule-triggered flow as defined by the user. Currently, these settings only apply to the Scheduled trigger type. See Scheduled Trigger Properties for details.
YAML:
- triggerType (String): Type of flow trigger. Valid values are Scheduled, Event, and OnDemand.
- triggerProperties (Property Map): Configuration details of a schedule-triggered flow as defined by the user. Currently, these settings only apply to the Scheduled trigger type. See Scheduled Trigger Properties for details.
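As a minimal, hedged C# sketch of the simplest case, an on-demand trigger needs only the trigger type; schedule settings apply only when the type is Scheduled.
// On-demand flows run only when started explicitly; no trigger properties are required.
var onDemandTrigger = new Aws.AppFlow.Inputs.FlowTriggerConfigArgs
{
    TriggerType = "OnDemand",
};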
FlowTriggerConfigTriggerProperties, FlowTriggerConfigTriggerPropertiesArgs
FlowTriggerConfigTriggerPropertiesScheduled, FlowTriggerConfigTriggerPropertiesScheduledArgs
C#:
- ScheduleExpression (string): Scheduling expression that determines the rate at which the schedule will run, for example rate(5minutes).
- DataPullMode (string): Whether a scheduled flow has an incremental data transfer or a complete data transfer for each flow run. Valid values are Incremental and Complete.
- FirstExecutionFrom (string): Date range for the records to import from the connector in the first flow run. Must be a valid RFC3339 timestamp.
- ScheduleEndTime (string): Scheduled end time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- ScheduleOffset (int): Optional offset that is added to the time interval for a schedule-triggered flow. Maximum value of 36000.
- ScheduleStartTime (string): Scheduled start time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- Timezone (string): Time zone used when referring to the date and time of a schedule-triggered flow, such as America/New_York.
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.aws.appflow.Flow;
import com.pulumi.aws.appflow.FlowArgs;
import com.pulumi.aws.appflow.inputs.FlowTriggerConfigArgs;
import com.pulumi.aws.appflow.inputs.FlowTriggerConfigTriggerPropertiesArgs;
import com.pulumi.aws.appflow.inputs.FlowTriggerConfigTriggerPropertiesScheduledArgs;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // The schedule settings shown here are illustrative; see the properties above for all options.
        var example = new Flow("example", FlowArgs.builder()
            .triggerConfig(FlowTriggerConfigArgs.builder()
                .triggerType("Scheduled")
                .triggerProperties(FlowTriggerConfigTriggerPropertiesArgs.builder()
                    .scheduled(FlowTriggerConfigTriggerPropertiesScheduledArgs.builder()
                        .scheduleExpression("rate(1minutes)")
                        .build())
                    .build())
                .build())
            .build());
    }
}

resources:
  example:
    type: aws:appflow:Flow
    properties:
      triggerConfig:
        triggerType: Scheduled
        triggerProperties:
          scheduled:
            scheduleExpression: rate(1minutes)
Go:
- ScheduleExpression (string): Scheduling expression that determines the rate at which the schedule will run, for example rate(5minutes).
- DataPullMode (string): Whether a scheduled flow has an incremental data transfer or a complete data transfer for each flow run. Valid values are Incremental and Complete.
- FirstExecutionFrom (string): Date range for the records to import from the connector in the first flow run. Must be a valid RFC3339 timestamp.
- ScheduleEndTime (string): Scheduled end time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- ScheduleOffset (int): Optional offset that is added to the time interval for a schedule-triggered flow. Maximum value of 36000.
- ScheduleStartTime (string): Scheduled start time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- Timezone (string): Time zone used when referring to the date and time of a schedule-triggered flow, such as America/New_York.
Java:
- scheduleExpression (String): Scheduling expression that determines the rate at which the schedule will run, for example rate(5minutes).
- dataPullMode (String): Whether a scheduled flow has an incremental data transfer or a complete data transfer for each flow run. Valid values are Incremental and Complete.
- firstExecutionFrom (String): Date range for the records to import from the connector in the first flow run. Must be a valid RFC3339 timestamp.
- scheduleEndTime (String): Scheduled end time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- scheduleOffset (Integer): Optional offset that is added to the time interval for a schedule-triggered flow. Maximum value of 36000.
- scheduleStartTime (String): Scheduled start time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- timezone (String): Time zone used when referring to the date and time of a schedule-triggered flow, such as America/New_York.
TypeScript:
- scheduleExpression (string): Scheduling expression that determines the rate at which the schedule will run, for example rate(5minutes).
- dataPullMode (string): Whether a scheduled flow has an incremental data transfer or a complete data transfer for each flow run. Valid values are Incremental and Complete.
- firstExecutionFrom (string): Date range for the records to import from the connector in the first flow run. Must be a valid RFC3339 timestamp.
- scheduleEndTime (string): Scheduled end time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- scheduleOffset (number): Optional offset that is added to the time interval for a schedule-triggered flow. Maximum value of 36000.
- scheduleStartTime (string): Scheduled start time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- timezone (string): Time zone used when referring to the date and time of a schedule-triggered flow, such as America/New_York.
Python:
- schedule_expression (str): Scheduling expression that determines the rate at which the schedule will run, for example rate(5minutes).
- data_pull_mode (str): Whether a scheduled flow has an incremental data transfer or a complete data transfer for each flow run. Valid values are Incremental and Complete.
- first_execution_from (str): Date range for the records to import from the connector in the first flow run. Must be a valid RFC3339 timestamp.
- schedule_end_time (str): Scheduled end time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- schedule_offset (int): Optional offset that is added to the time interval for a schedule-triggered flow. Maximum value of 36000.
- schedule_start_time (str): Scheduled start time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- timezone (str): Time zone used when referring to the date and time of a schedule-triggered flow, such as America/New_York.
YAML:
- scheduleExpression (String): Scheduling expression that determines the rate at which the schedule will run, for example rate(5minutes).
- dataPullMode (String): Whether a scheduled flow has an incremental data transfer or a complete data transfer for each flow run. Valid values are Incremental and Complete.
- firstExecutionFrom (String): Date range for the records to import from the connector in the first flow run. Must be a valid RFC3339 timestamp.
- scheduleEndTime (String): Scheduled end time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- scheduleOffset (Number): Optional offset that is added to the time interval for a schedule-triggered flow. Maximum value of 36000.
- scheduleStartTime (String): Scheduled start time for a schedule-triggered flow. Must be a valid RFC3339 timestamp.
- timezone (String): Time zone used when referring to the date and time of a schedule-triggered flow, such as America/New_York.
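For parity with the C# example at the top of this page, here is a hedged C# sketch of the same scheduled trigger. The start time and rate are illustrative placeholders, and the nesting follows the FlowTriggerConfig, TriggerProperties, and Scheduled types documented above.
// Hypothetical schedule: incremental pull every minute, starting from a placeholder timestamp.
var scheduledTrigger = new Aws.AppFlow.Inputs.FlowTriggerConfigArgs
{
    TriggerType = "Scheduled",
    TriggerProperties = new Aws.AppFlow.Inputs.FlowTriggerConfigTriggerPropertiesArgs
    {
        Scheduled = new Aws.AppFlow.Inputs.FlowTriggerConfigTriggerPropertiesScheduledArgs
        {
            ScheduleExpression = "rate(1minutes)",
            DataPullMode = "Incremental",
            ScheduleStartTime = "2024-01-01T00:00:00Z",
            Timezone = "America/New_York",
        },
    },
};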
Import
Using pulumi import, import AppFlow flows using the arn. For example:
$ pulumi import aws:appflow/flow:Flow example arn:aws:appflow:us-west-2:123456789012:flow/example-flow
Package Details
- Repository: AWS Classic pulumi/pulumi-aws
- License: Apache-2.0
- Notes: This Pulumi package is based on the aws Terraform Provider.