tencentcloud.CkafkaDatahubTask

tencentcloud 1.81.189 published on Wednesday, Apr 30, 2025 by tencentcloudstack

    Provides a resource to create a CKafka DataHub task.

    Example Usage

    TypeScript:

    import * as pulumi from "@pulumi/pulumi";
    import * as tencentcloud from "@pulumi/tencentcloud";
    
    const datahubTask = new tencentcloud.CkafkaDatahubTask("datahubTask", {
        sourceResource: {
            postgreSqlParam: {
                database: "postgres",
                isTableRegular: false,
                keyColumns: "",
                pluginName: "decoderbufs",
                recordWithSchema: false,
                resource: "resource-y9nxnw46",
                snapshotMode: "never",
                table: "*",
            },
            type: "POSTGRESQL",
        },
        targetResource: {
            topicParam: {
                compressionType: "none",
                resource: "1308726196-keep-topic",
                useAutoCreateTopic: false,
            },
            type: "TOPIC",
        },
        taskName: "test-task123321",
        taskType: "SOURCE",
    });
    
    Python:

    import pulumi
    import pulumi_tencentcloud as tencentcloud
    
    datahub_task = tencentcloud.CkafkaDatahubTask("datahubTask",
        source_resource={
            "postgre_sql_param": {
                "database": "postgres",
                "is_table_regular": False,
                "key_columns": "",
                "plugin_name": "decoderbufs",
                "record_with_schema": False,
                "resource": "resource-y9nxnw46",
                "snapshot_mode": "never",
                "table": "*",
            },
            "type": "POSTGRESQL",
        },
        target_resource={
            "topic_param": {
                "compression_type": "none",
                "resource": "1308726196-keep-topic",
                "use_auto_create_topic": False,
            },
            "type": "TOPIC",
        },
        task_name="test-task123321",
        task_type="SOURCE")
    
    Go:

    package main
    
    import (
    	"github.com/pulumi/pulumi-terraform-provider/sdks/go/tencentcloud/tencentcloud"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := tencentcloud.NewCkafkaDatahubTask(ctx, "datahubTask", &tencentcloud.CkafkaDatahubTaskArgs{
    			SourceResource: &tencentcloud.CkafkaDatahubTaskSourceResourceArgs{
    				PostgreSqlParam: &tencentcloud.CkafkaDatahubTaskSourceResourcePostgreSqlParamArgs{
    					Database:         pulumi.String("postgres"),
    					IsTableRegular:   pulumi.Bool(false),
    					KeyColumns:       pulumi.String(""),
    					PluginName:       pulumi.String("decoderbufs"),
    					RecordWithSchema: pulumi.Bool(false),
    					Resource:         pulumi.String("resource-y9nxnw46"),
    					SnapshotMode:     pulumi.String("never"),
    					Table:            pulumi.String("*"),
    				},
    				Type: pulumi.String("POSTGRESQL"),
    			},
    			TargetResource: &tencentcloud.CkafkaDatahubTaskTargetResourceArgs{
    				TopicParam: &tencentcloud.CkafkaDatahubTaskTargetResourceTopicParamArgs{
    					CompressionType:    pulumi.String("none"),
    					Resource:           pulumi.String("1308726196-keep-topic"),
    					UseAutoCreateTopic: pulumi.Bool(false),
    				},
    				Type: pulumi.String("TOPIC"),
    			},
    			TaskName: pulumi.String("test-task123321"),
    			TaskType: pulumi.String("SOURCE"),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    C#:

    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Tencentcloud = Pulumi.Tencentcloud;
    
    return await Deployment.RunAsync(() => 
    {
        var datahubTask = new Tencentcloud.CkafkaDatahubTask("datahubTask", new()
        {
            SourceResource = new Tencentcloud.Inputs.CkafkaDatahubTaskSourceResourceArgs
            {
                PostgreSqlParam = new Tencentcloud.Inputs.CkafkaDatahubTaskSourceResourcePostgreSqlParamArgs
                {
                    Database = "postgres",
                    IsTableRegular = false,
                    KeyColumns = "",
                    PluginName = "decoderbufs",
                    RecordWithSchema = false,
                    Resource = "resource-y9nxnw46",
                    SnapshotMode = "never",
                    Table = "*",
                },
                Type = "POSTGRESQL",
            },
            TargetResource = new Tencentcloud.Inputs.CkafkaDatahubTaskTargetResourceArgs
            {
                TopicParam = new Tencentcloud.Inputs.CkafkaDatahubTaskTargetResourceTopicParamArgs
                {
                    CompressionType = "none",
                    Resource = "1308726196-keep-topic",
                    UseAutoCreateTopic = false,
                },
                Type = "TOPIC",
            },
            TaskName = "test-task123321",
            TaskType = "SOURCE",
        });
    
    });
    
    Java:

    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.tencentcloud.CkafkaDatahubTask;
    import com.pulumi.tencentcloud.CkafkaDatahubTaskArgs;
    import com.pulumi.tencentcloud.inputs.CkafkaDatahubTaskSourceResourceArgs;
    import com.pulumi.tencentcloud.inputs.CkafkaDatahubTaskSourceResourcePostgreSqlParamArgs;
    import com.pulumi.tencentcloud.inputs.CkafkaDatahubTaskTargetResourceArgs;
    import com.pulumi.tencentcloud.inputs.CkafkaDatahubTaskTargetResourceTopicParamArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var datahubTask = new CkafkaDatahubTask("datahubTask", CkafkaDatahubTaskArgs.builder()
                .sourceResource(CkafkaDatahubTaskSourceResourceArgs.builder()
                    .postgreSqlParam(CkafkaDatahubTaskSourceResourcePostgreSqlParamArgs.builder()
                        .database("postgres")
                        .isTableRegular(false)
                        .keyColumns("")
                        .pluginName("decoderbufs")
                        .recordWithSchema(false)
                        .resource("resource-y9nxnw46")
                        .snapshotMode("never")
                        .table("*")
                        .build())
                    .type("POSTGRESQL")
                    .build())
                .targetResource(CkafkaDatahubTaskTargetResourceArgs.builder()
                    .topicParam(CkafkaDatahubTaskTargetResourceTopicParamArgs.builder()
                        .compressionType("none")
                        .resource("1308726196-keep-topic")
                        .useAutoCreateTopic(false)
                        .build())
                    .type("TOPIC")
                    .build())
                .taskName("test-task123321")
                .taskType("SOURCE")
                .build());
    
        }
    }
    
    YAML:

    resources:
      datahubTask:
        type: tencentcloud:CkafkaDatahubTask
        properties:
          sourceResource:
            postgreSqlParam:
              database: postgres
              isTableRegular: false
              keyColumns: ""
              pluginName: decoderbufs
              recordWithSchema: false
              resource: resource-y9nxnw46
              snapshotMode: never
              table: '*'
            type: POSTGRESQL
          targetResource:
            topicParam:
              compressionType: none
              resource: 1308726196-keep-topic
              useAutoCreateTopic: false
            type: TOPIC
          taskName: test-task123321
          taskType: SOURCE
    

    Create CkafkaDatahubTask Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new CkafkaDatahubTask(name: string, args: CkafkaDatahubTaskArgs, opts?: CustomResourceOptions);
    @overload
    def CkafkaDatahubTask(resource_name: str,
                          args: CkafkaDatahubTaskArgs,
                          opts: Optional[ResourceOptions] = None)
    
    @overload
    def CkafkaDatahubTask(resource_name: str,
                          opts: Optional[ResourceOptions] = None,
                          task_name: Optional[str] = None,
                          task_type: Optional[str] = None,
                          ckafka_datahub_task_id: Optional[str] = None,
                          schema_id: Optional[str] = None,
                          source_resource: Optional[CkafkaDatahubTaskSourceResourceArgs] = None,
                          target_resource: Optional[CkafkaDatahubTaskTargetResourceArgs] = None,
                          transform_param: Optional[CkafkaDatahubTaskTransformParamArgs] = None,
                          transforms_param: Optional[CkafkaDatahubTaskTransformsParamArgs] = None)
    func NewCkafkaDatahubTask(ctx *Context, name string, args CkafkaDatahubTaskArgs, opts ...ResourceOption) (*CkafkaDatahubTask, error)
    public CkafkaDatahubTask(string name, CkafkaDatahubTaskArgs args, CustomResourceOptions? opts = null)
    public CkafkaDatahubTask(String name, CkafkaDatahubTaskArgs args)
    public CkafkaDatahubTask(String name, CkafkaDatahubTaskArgs args, CustomResourceOptions options)
    
    type: tencentcloud:CkafkaDatahubTask
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args CkafkaDatahubTaskArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args CkafkaDatahubTaskArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args CkafkaDatahubTaskArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args CkafkaDatahubTaskArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args CkafkaDatahubTaskArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    CkafkaDatahubTask Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
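
    For example, the nested target resource from the example above can be written either way; a minimal sketch (both forms are equivalent):

    import pulumi_tencentcloud as tencentcloud

    # 1. As a typed argument class:
    target_args = tencentcloud.CkafkaDatahubTaskTargetResourceArgs(
        type="TOPIC",
        topic_param=tencentcloud.CkafkaDatahubTaskTargetResourceTopicParamArgs(
            resource="1308726196-keep-topic",
            compression_type="none",
            use_auto_create_topic=False,
        ),
    )

    # 2. As a plain dictionary literal with snake_case keys:
    target_dict = {
        "type": "TOPIC",
        "topic_param": {
            "resource": "1308726196-keep-topic",
            "compression_type": "none",
            "use_auto_create_topic": False,
        },
    }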

    The CkafkaDatahubTask resource accepts the following input properties:

    TaskName string
    name of the task.
    TaskType string
    type of the task, SOURCE(data input), SINK(data output).
    CkafkaDatahubTaskId string
    ID of the resource.
    SchemaId string
    SchemaId.
    SourceResource CkafkaDatahubTaskSourceResource
    data resource.
    TargetResource CkafkaDatahubTaskTargetResource
    Target Resource.
    TransformParam CkafkaDatahubTaskTransformParam
    Data Processing Rules.
    TransformsParam CkafkaDatahubTaskTransformsParam
    Data processing rules.
    TaskName string
    name of the task.
    TaskType string
    type of the task, SOURCE(data input), SINK(data output).
    CkafkaDatahubTaskId string
    ID of the resource.
    SchemaId string
    SchemaId.
    SourceResource CkafkaDatahubTaskSourceResourceArgs
    data resource.
    TargetResource CkafkaDatahubTaskTargetResourceArgs
    Target Resource.
    TransformParam CkafkaDatahubTaskTransformParamArgs
    Data Processing Rules.
    TransformsParam CkafkaDatahubTaskTransformsParamArgs
    Data processing rules.
    taskName String
    name of the task.
    taskType String
    type of the task, SOURCE(data input), SINK(data output).
    ckafkaDatahubTaskId String
    ID of the resource.
    schemaId String
    SchemaId.
    sourceResource CkafkaDatahubTaskSourceResource
    data resource.
    targetResource CkafkaDatahubTaskTargetResource
    Target Resource.
    transformParam CkafkaDatahubTaskTransformParam
    Data Processing Rules.
    transformsParam CkafkaDatahubTaskTransformsParam
    Data processing rules.
    taskName string
    name of the task.
    taskType string
    type of the task, SOURCE(data input), SINK(data output).
    ckafkaDatahubTaskId string
    ID of the resource.
    schemaId string
    SchemaId.
    sourceResource CkafkaDatahubTaskSourceResource
    data resource.
    targetResource CkafkaDatahubTaskTargetResource
    Target Resource.
    transformParam CkafkaDatahubTaskTransformParam
    Data Processing Rules.
    transformsParam CkafkaDatahubTaskTransformsParam
    Data processing rules.
    task_name str
    name of the task.
    task_type str
    type of the task, SOURCE(data input), SINK(data output).
    ckafka_datahub_task_id str
    ID of the resource.
    schema_id str
    SchemaId.
    source_resource CkafkaDatahubTaskSourceResourceArgs
    data resource.
    target_resource CkafkaDatahubTaskTargetResourceArgs
    Target Resource.
    transform_param CkafkaDatahubTaskTransformParamArgs
    Data Processing Rules.
    transforms_param CkafkaDatahubTaskTransformsParamArgs
    Data processing rules.
    taskName String
    name of the task.
    taskType String
    type of the task, SOURCE(data input), SINK(data output).
    ckafkaDatahubTaskId String
    ID of the resource.
    schemaId String
    SchemaId.
    sourceResource Property Map
    data resource.
    targetResource Property Map
    Target Resource.
    transformParam Property Map
    Data Processing Rules.
    transformsParam Property Map
    Data processing rules.

    Outputs

    All input properties are implicitly available as output properties. Additionally, the CkafkaDatahubTask resource produces the following output properties:

    Id string
    The provider-assigned unique ID for this managed resource.
    Id string
    The provider-assigned unique ID for this managed resource.
    id String
    The provider-assigned unique ID for this managed resource.
    id string
    The provider-assigned unique ID for this managed resource.
    id str
    The provider-assigned unique ID for this managed resource.
    id String
    The provider-assigned unique ID for this managed resource.
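
    For instance, the ID can be read like any other output; a minimal Python sketch that exports the ID of the task created in the example above:

    import pulumi

    # Export the provider-assigned ID as a stack output.
    pulumi.export("datahub_task_id", datahub_task.id)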

    Look up Existing CkafkaDatahubTask Resource

    Get an existing CkafkaDatahubTask resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: CkafkaDatahubTaskState, opts?: CustomResourceOptions): CkafkaDatahubTask
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            ckafka_datahub_task_id: Optional[str] = None,
            schema_id: Optional[str] = None,
            source_resource: Optional[CkafkaDatahubTaskSourceResourceArgs] = None,
            target_resource: Optional[CkafkaDatahubTaskTargetResourceArgs] = None,
            task_name: Optional[str] = None,
            task_type: Optional[str] = None,
            transform_param: Optional[CkafkaDatahubTaskTransformParamArgs] = None,
            transforms_param: Optional[CkafkaDatahubTaskTransformsParamArgs] = None) -> CkafkaDatahubTask
    func GetCkafkaDatahubTask(ctx *Context, name string, id IDInput, state *CkafkaDatahubTaskState, opts ...ResourceOption) (*CkafkaDatahubTask, error)
    public static CkafkaDatahubTask Get(string name, Input<string> id, CkafkaDatahubTaskState? state, CustomResourceOptions? opts = null)
    public static CkafkaDatahubTask get(String name, Output<String> id, CkafkaDatahubTaskState state, CustomResourceOptions options)
    resources:
      _:
        type: tencentcloud:CkafkaDatahubTask
        get:
          id: ${id}
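
    As a minimal Python sketch (the ID below is a placeholder for a real task ID), a lookup brings existing state into the program without creating anything:

    import pulumi_tencentcloud as tencentcloud

    # Look up an existing task by its provider ID (placeholder value).
    existing = tencentcloud.CkafkaDatahubTask.get("existing-task", "datahub-task-id")
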
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    CkafkaDatahubTaskId string
    ID of the resource.
    SchemaId string
    SchemaId.
    SourceResource CkafkaDatahubTaskSourceResource
    data resource.
    TargetResource CkafkaDatahubTaskTargetResource
    Target Resource.
    TaskName string
    name of the task.
    TaskType string
    type of the task, SOURCE(data input), SINK(data output).
    TransformParam CkafkaDatahubTaskTransformParam
    Data Processing Rules.
    TransformsParam CkafkaDatahubTaskTransformsParam
    Data processing rules.
    CkafkaDatahubTaskId string
    ID of the resource.
    SchemaId string
    SchemaId.
    SourceResource CkafkaDatahubTaskSourceResourceArgs
    data resource.
    TargetResource CkafkaDatahubTaskTargetResourceArgs
    Target Resource.
    TaskName string
    name of the task.
    TaskType string
    type of the task, SOURCE(data input), SINK(data output).
    TransformParam CkafkaDatahubTaskTransformParamArgs
    Data Processing Rules.
    TransformsParam CkafkaDatahubTaskTransformsParamArgs
    Data processing rules.
    ckafkaDatahubTaskId String
    ID of the resource.
    schemaId String
    SchemaId.
    sourceResource CkafkaDatahubTaskSourceResource
    data resource.
    targetResource CkafkaDatahubTaskTargetResource
    Target Resource.
    taskName String
    name of the task.
    taskType String
    type of the task, SOURCE(data input), SINK(data output).
    transformParam CkafkaDatahubTaskTransformParam
    Data Processing Rules.
    transformsParam CkafkaDatahubTaskTransformsParam
    Data processing rules.
    ckafkaDatahubTaskId string
    ID of the resource.
    schemaId string
    SchemaId.
    sourceResource CkafkaDatahubTaskSourceResource
    data resource.
    targetResource CkafkaDatahubTaskTargetResource
    Target Resource.
    taskName string
    name of the task.
    taskType string
    type of the task, SOURCE(data input), SINK(data output).
    transformParam CkafkaDatahubTaskTransformParam
    Data Processing Rules.
    transformsParam CkafkaDatahubTaskTransformsParam
    Data processing rules.
    ckafka_datahub_task_id str
    ID of the resource.
    schema_id str
    SchemaId.
    source_resource CkafkaDatahubTaskSourceResourceArgs
    data resource.
    target_resource CkafkaDatahubTaskTargetResourceArgs
    Target Resource.
    task_name str
    name of the task.
    task_type str
    type of the task, SOURCE(data input), SINK(data output).
    transform_param CkafkaDatahubTaskTransformParamArgs
    Data Processing Rules.
    transforms_param CkafkaDatahubTaskTransformsParamArgs
    Data processing rules.
    ckafkaDatahubTaskId String
    ID of the resource.
    schemaId String
    SchemaId.
    sourceResource Property Map
    data resource.
    targetResource Property Map
    Target Resource.
    taskName String
    name of the task.
    taskType String
    type of the task, SOURCE(data input), SINK(data output).
    transformParam Property Map
    Data Processing Rules.
    transformsParam Property Map
    Data processing rules.

    Supporting Types

    CkafkaDatahubTaskSourceResource, CkafkaDatahubTaskSourceResourceArgs

    Type string
    resource type.
    ClickHouseParam CkafkaDatahubTaskSourceResourceClickHouseParam
    ClickHouse configuration, required when Type is CLICKHOUSE.
    ClsParam CkafkaDatahubTaskSourceResourceClsParam
    Cls configuration, Required when Type is CLS.
    CosParam CkafkaDatahubTaskSourceResourceCosParam
    Cos configuration, required when Type is COS.
    CtsdbParam CkafkaDatahubTaskSourceResourceCtsdbParam
    Ctsdb configuration, Required when Type is CTSDB.
    DtsParam CkafkaDatahubTaskSourceResourceDtsParam
    Dts configuration, required when Type is DTS.
    EsParam CkafkaDatahubTaskSourceResourceEsParam
    Es configuration, required when Type is ES.
    EventBusParam CkafkaDatahubTaskSourceResourceEventBusParam
    EB configuration, required when type is EB.
    KafkaParam CkafkaDatahubTaskSourceResourceKafkaParam
    ckafka configuration, required when Type is KAFKA.
    MariaDbParam CkafkaDatahubTaskSourceResourceMariaDbParam
    MariaDB configuration, Required when Type is MARIADB.
    MongoDbParam CkafkaDatahubTaskSourceResourceMongoDbParam
    MongoDB config, Required when Type is MONGODB.
    MySqlParam CkafkaDatahubTaskSourceResourceMySqlParam
    MySQL configuration, Required when Type is MYSQL.
    PostgreSqlParam CkafkaDatahubTaskSourceResourcePostgreSqlParam
    PostgreSQL configuration, Required when Type is POSTGRESQL or TDSQL C_POSTGRESQL.
    ScfParam CkafkaDatahubTaskSourceResourceScfParam
    Scf configuration, Required when Type is SCF.
    SqlServerParam CkafkaDatahubTaskSourceResourceSqlServerParam
    SQLServer configuration, Required when Type is SQLSERVER.
    TdwParam CkafkaDatahubTaskSourceResourceTdwParam
    Tdw configuration, required when Type is TDW.
    TopicParam CkafkaDatahubTaskSourceResourceTopicParam
    Topic configuration, required when Type is TOPIC.
    Type string
    resource type.
    ClickHouseParam CkafkaDatahubTaskSourceResourceClickHouseParam
    ClickHouse configuration, required when Type is CLICKHOUSE.
    ClsParam CkafkaDatahubTaskSourceResourceClsParam
    Cls configuration, Required when Type is CLS.
    CosParam CkafkaDatahubTaskSourceResourceCosParam
    Cos configuration, required when Type is COS.
    CtsdbParam CkafkaDatahubTaskSourceResourceCtsdbParam
    Ctsdb configuration, Required when Type is CTSDB.
    DtsParam CkafkaDatahubTaskSourceResourceDtsParam
    Dts configuration, required when Type is DTS.
    EsParam CkafkaDatahubTaskSourceResourceEsParam
    Es configuration, required when Type is ES.
    EventBusParam CkafkaDatahubTaskSourceResourceEventBusParam
    EB configuration, required when type is EB.
    KafkaParam CkafkaDatahubTaskSourceResourceKafkaParam
    ckafka configuration, required when Type is KAFKA.
    MariaDbParam CkafkaDatahubTaskSourceResourceMariaDbParam
    MariaDB configuration, Required when Type is MARIADB.
    MongoDbParam CkafkaDatahubTaskSourceResourceMongoDbParam
    MongoDB config, Required when Type is MONGODB.
    MySqlParam CkafkaDatahubTaskSourceResourceMySqlParam
    MySQL configuration, Required when Type is MYSQL.
    PostgreSqlParam CkafkaDatahubTaskSourceResourcePostgreSqlParam
    PostgreSQL configuration, Required when Type is POSTGRESQL or TDSQL C_POSTGRESQL.
    ScfParam CkafkaDatahubTaskSourceResourceScfParam
    Scf configuration, Required when Type is SCF.
    SqlServerParam CkafkaDatahubTaskSourceResourceSqlServerParam
    SQLServer configuration, Required when Type is SQLSERVER.
    TdwParam CkafkaDatahubTaskSourceResourceTdwParam
    Tdw configuration, required when Type is TDW.
    TopicParam CkafkaDatahubTaskSourceResourceTopicParam
    Topic configuration, required when Type is TOPIC.
    type String
    resource type.
    clickHouseParam CkafkaDatahubTaskSourceResourceClickHouseParam
    ClickHouse configuration, required when Type is CLICKHOUSE.
    clsParam CkafkaDatahubTaskSourceResourceClsParam
    Cls configuration, Required when Type is CLS.
    cosParam CkafkaDatahubTaskSourceResourceCosParam
    Cos configuration, required when Type is COS.
    ctsdbParam CkafkaDatahubTaskSourceResourceCtsdbParam
    Ctsdb configuration, Required when Type is CTSDB.
    dtsParam CkafkaDatahubTaskSourceResourceDtsParam
    Dts configuration, required when Type is DTS.
    esParam CkafkaDatahubTaskSourceResourceEsParam
    Es configuration, required when Type is ES.
    eventBusParam CkafkaDatahubTaskSourceResourceEventBusParam
    EB configuration, required when type is EB.
    kafkaParam CkafkaDatahubTaskSourceResourceKafkaParam
    ckafka configuration, required when Type is KAFKA.
    mariaDbParam CkafkaDatahubTaskSourceResourceMariaDbParam
    MariaDB configuration, Required when Type is MARIADB.
    mongoDbParam CkafkaDatahubTaskSourceResourceMongoDbParam
    MongoDB config, Required when Type is MONGODB.
    mySqlParam CkafkaDatahubTaskSourceResourceMySqlParam
    MySQL configuration, Required when Type is MYSQL.
    postgreSqlParam CkafkaDatahubTaskSourceResourcePostgreSqlParam
    PostgreSQL configuration, Required when Type is POSTGRESQL or TDSQL C_POSTGRESQL.
    scfParam CkafkaDatahubTaskSourceResourceScfParam
    Scf configuration, Required when Type is SCF.
    sqlServerParam CkafkaDatahubTaskSourceResourceSqlServerParam
    SQLServer configuration, Required when Type is SQLSERVER.
    tdwParam CkafkaDatahubTaskSourceResourceTdwParam
    Tdw configuration, required when Type is TDW.
    topicParam CkafkaDatahubTaskSourceResourceTopicParam
    Topic configuration, required when Type is TOPIC.
    type string
    resource type.
    clickHouseParam CkafkaDatahubTaskSourceResourceClickHouseParam
    ClickHouse configuration, required when Type is CLICKHOUSE.
    clsParam CkafkaDatahubTaskSourceResourceClsParam
    Cls configuration, Required when Type is CLS.
    cosParam CkafkaDatahubTaskSourceResourceCosParam
    Cos configuration, required when Type is COS.
    ctsdbParam CkafkaDatahubTaskSourceResourceCtsdbParam
    Ctsdb configuration, Required when Type is CTSDB.
    dtsParam CkafkaDatahubTaskSourceResourceDtsParam
    Dts configuration, required when Type is DTS.
    esParam CkafkaDatahubTaskSourceResourceEsParam
    Es configuration, required when Type is ES.
    eventBusParam CkafkaDatahubTaskSourceResourceEventBusParam
    EB configuration, required when type is EB.
    kafkaParam CkafkaDatahubTaskSourceResourceKafkaParam
    ckafka configuration, required when Type is KAFKA.
    mariaDbParam CkafkaDatahubTaskSourceResourceMariaDbParam
    MariaDB configuration, Required when Type is MARIADB.
    mongoDbParam CkafkaDatahubTaskSourceResourceMongoDbParam
    MongoDB config, Required when Type is MONGODB.
    mySqlParam CkafkaDatahubTaskSourceResourceMySqlParam
    MySQL configuration, Required when Type is MYSQL.
    postgreSqlParam CkafkaDatahubTaskSourceResourcePostgreSqlParam
    PostgreSQL configuration, Required when Type is POSTGRESQL or TDSQL C_POSTGRESQL.
    scfParam CkafkaDatahubTaskSourceResourceScfParam
    Scf configuration, Required when Type is SCF.
    sqlServerParam CkafkaDatahubTaskSourceResourceSqlServerParam
    SQLServer configuration, Required when Type is SQLSERVER.
    tdwParam CkafkaDatahubTaskSourceResourceTdwParam
    Tdw configuration, required when Type is TDW.
    topicParam CkafkaDatahubTaskSourceResourceTopicParam
    Topic configuration, required when Type is TOPIC.
    type str
    resource type.
    click_house_param CkafkaDatahubTaskSourceResourceClickHouseParam
    ClickHouse configuration, required when Type is CLICKHOUSE.
    cls_param CkafkaDatahubTaskSourceResourceClsParam
    Cls configuration, Required when Type is CLS.
    cos_param CkafkaDatahubTaskSourceResourceCosParam
    Cos configuration, required when Type is COS.
    ctsdb_param CkafkaDatahubTaskSourceResourceCtsdbParam
    Ctsdb configuration, Required when Type is CTSDB.
    dts_param CkafkaDatahubTaskSourceResourceDtsParam
    Dts configuration, required when Type is DTS.
    es_param CkafkaDatahubTaskSourceResourceEsParam
    Es configuration, required when Type is ES.
    event_bus_param CkafkaDatahubTaskSourceResourceEventBusParam
    EB configuration, required when type is EB.
    kafka_param CkafkaDatahubTaskSourceResourceKafkaParam
    ckafka configuration, required when Type is KAFKA.
    maria_db_param CkafkaDatahubTaskSourceResourceMariaDbParam
    MariaDB configuration, Required when Type is MARIADB.
    mongo_db_param CkafkaDatahubTaskSourceResourceMongoDbParam
    MongoDB config, Required when Type is MONGODB.
    my_sql_param CkafkaDatahubTaskSourceResourceMySqlParam
    MySQL configuration, Required when Type is MYSQL.
    postgre_sql_param CkafkaDatahubTaskSourceResourcePostgreSqlParam
    PostgreSQL configuration, Required when Type is POSTGRESQL or TDSQL C_POSTGRESQL.
    scf_param CkafkaDatahubTaskSourceResourceScfParam
    Scf configuration, Required when Type is SCF.
    sql_server_param CkafkaDatahubTaskSourceResourceSqlServerParam
    SQLServer configuration, Required when Type is SQLSERVER.
    tdw_param CkafkaDatahubTaskSourceResourceTdwParam
    Tdw configuration, required when Type is TDW.
    topic_param CkafkaDatahubTaskSourceResourceTopicParam
    Topic configuration, required when Type is TOPIC.
    type String
    resource type.
    clickHouseParam Property Map
    ClickHouse configuration, required when Type is CLICKHOUSE.
    clsParam Property Map
    Cls configuration, Required when Type is CLS.
    cosParam Property Map
    Cos configuration, required when Type is COS.
    ctsdbParam Property Map
    Ctsdb configuration, Required when Type is CTSDB.
    dtsParam Property Map
    Dts configuration, required when Type is DTS.
    esParam Property Map
    Es configuration, required when Type is ES.
    eventBusParam Property Map
    EB configuration, required when type is EB.
    kafkaParam Property Map
    ckafka configuration, required when Type is KAFKA.
    mariaDbParam Property Map
    MariaDB configuration, Required when Type is MARIADB.
    mongoDbParam Property Map
    MongoDB config, Required when Type is MONGODB.
    mySqlParam Property Map
    MySQL configuration, Required when Type is MYSQL.
    postgreSqlParam Property Map
    PostgreSQL configuration, Required when Type is POSTGRESQL or TDSQL C_POSTGRESQL.
    scfParam Property Map
    Scf configuration, Required when Type is SCF.
    sqlServerParam Property Map
    SQLServer configuration, Required when Type is SQLSERVER.
    tdwParam Property Map
    Tdw configuration, required when Type is TDW.
    topicParam Property Map
    Topic configuration, required when Type is TOPIC.
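
    Throughout this type, the type field names the resource kind and the matching param block carries its configuration. A minimal Python sketch (the CLS resource ID is a placeholder) pairing Type CLS with clsParam, using only fields documented under CkafkaDatahubTaskSourceResourceClsParam below:

    import pulumi_tencentcloud as tencentcloud

    # The type field selects which *_param block is consumed;
    # here "CLS" pairs with cls_param (resource ID is a placeholder).
    source = tencentcloud.CkafkaDatahubTaskSourceResourceArgs(
        type="CLS",
        cls_param=tencentcloud.CkafkaDatahubTaskSourceResourceClsParamArgs(
            resource="cls-topic-id",
            decode_json=True,
        ),
    )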

    CkafkaDatahubTaskSourceResourceClickHouseParam, CkafkaDatahubTaskSourceResourceClickHouseParamArgs

    Cluster string
    ClickHouse cluster.
    Database string
    ClickHouse database name.
    Resource string
    resource id.
    Schemas List<CkafkaDatahubTaskSourceResourceClickHouseParamSchema>
    ClickHouse schema.
    Table string
    ClickHouse table.
    DropCls CkafkaDatahubTaskSourceResourceClickHouseParamDropCls
    When DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
    DropInvalidMessage bool
    Whether ClickHouse discards the message that fails to parse, the default is true.
    Ip string
    ClickHouse ip.
    Password string
    ClickHouse passwd.
    Port double
    ClickHouse port.
    SelfBuilt bool
    Whether it is a self-built cluster.
    ServiceVip string
    instance vip.
    Type string
    type of table column.
    UniqVpcId string
    instance vpc id.
    UserName string
    ClickHouse user name.
    Cluster string
    ClickHouse cluster.
    Database string
    ClickHouse database name.
    Resource string
    resource id.
    Schemas []CkafkaDatahubTaskSourceResourceClickHouseParamSchema
    ClickHouse schema.
    Table string
    ClickHouse table.
    DropCls CkafkaDatahubTaskSourceResourceClickHouseParamDropCls
    When DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
    DropInvalidMessage bool
    Whether ClickHouse discards the message that fails to parse, the default is true.
    Ip string
    ClickHouse ip.
    Password string
    ClickHouse passwd.
    Port float64
    ClickHouse port.
    SelfBuilt bool
    Whether it is a self-built cluster.
    ServiceVip string
    instance vip.
    Type string
    type of table column.
    UniqVpcId string
    instance vpc id.
    UserName string
    ClickHouse user name.
    cluster String
    ClickHouse cluster.
    database String
    ClickHouse database name.
    resource String
    resource id.
    schemas List<CkafkaDatahubTaskSourceResourceClickHouseParamSchema>
    ClickHouse schema.
    table String
    ClickHouse table.
    dropCls CkafkaDatahubTaskSourceResourceClickHouseParamDropCls
    When DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
    dropInvalidMessage Boolean
    Whether ClickHouse discards the message that fails to parse, the default is true.
    ip String
    ClickHouse ip.
    password String
    ClickHouse passwd.
    port Double
    ClickHouse port.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    serviceVip String
    instance vip.
    type String
    type of table column.
    uniqVpcId String
    instance vpc id.
    userName String
    ClickHouse user name.
    cluster string
    ClickHouse cluster.
    database string
    ClickHouse database name.
    resource string
    resource id.
    schemas CkafkaDatahubTaskSourceResourceClickHouseParamSchema[]
    ClickHouse schema.
    table string
    ClickHouse table.
    dropCls CkafkaDatahubTaskSourceResourceClickHouseParamDropCls
    When DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
    dropInvalidMessage boolean
    Whether ClickHouse discards the message that fails to parse, the default is true.
    ip string
    ClickHouse ip.
    password string
    ClickHouse passwd.
    port number
    ClickHouse port.
    selfBuilt boolean
    Whether it is a self-built cluster.
    serviceVip string
    instance vip.
    type string
    type of table column.
    uniqVpcId string
    instance vpc id.
    userName string
    ClickHouse user name.
    cluster str
    ClickHouse cluster.
    database str
    ClickHouse database name.
    resource str
    resource id.
    schemas Sequence[CkafkaDatahubTaskSourceResourceClickHouseParamSchema]
    ClickHouse schema.
    table str
    ClickHouse table.
    drop_cls CkafkaDatahubTaskSourceResourceClickHouseParamDropCls
    When DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
    drop_invalid_message bool
    Whether ClickHouse discards the message that fails to parse, the default is true.
    ip str
    ClickHouse ip.
    password str
    ClickHouse passwd.
    port float
    ClickHouse port.
    self_built bool
    Whether it is a self-built cluster.
    service_vip str
    instance vip.
    type str
    type of table column.
    uniq_vpc_id str
    instance vpc id.
    user_name str
    ClickHouse user name.
    cluster String
    ClickHouse cluster.
    database String
    ClickHouse database name.
    resource String
    resource id.
    schemas List<Property Map>
    ClickHouse schema.
    table String
    ClickHouse table.
    dropCls Property Map
    When DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
    dropInvalidMessage Boolean
    Whether ClickHouse discards the message that fails to parse, the default is true.
    ip String
    ClickHouse ip.
    password String
    ClickHouse passwd.
    port Number
    ClickHouse port.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    serviceVip String
    instance vip.
    type String
    type of table column.
    uniqVpcId String
    instance vpc id.
    userName String
    ClickHouse user name.

    CkafkaDatahubTaskSourceResourceClickHouseParamDropCls, CkafkaDatahubTaskSourceResourceClickHouseParamDropClsArgs

    DropClsLogSet string
    cls LogSet id.
    DropClsOwneruin string
    account.
    DropClsRegion string
    The region where the cls is delivered.
    DropClsTopicId string
    cls topic.
    DropInvalidMessageToCls bool
    Whether to deliver to cls.
    DropClsLogSet string
    cls LogSet id.
    DropClsOwneruin string
    account.
    DropClsRegion string
    The region where the cls is delivered.
    DropClsTopicId string
    cls topic.
    DropInvalidMessageToCls bool
    Whether to deliver to cls.
    dropClsLogSet String
    cls LogSet id.
    dropClsOwneruin String
    account.
    dropClsRegion String
    The region where the cls is delivered.
    dropClsTopicId String
    cls topic.
    dropInvalidMessageToCls Boolean
    Whether to deliver to cls.
    dropClsLogSet string
    cls LogSet id.
    dropClsOwneruin string
    account.
    dropClsRegion string
    The region where the cls is delivered.
    dropClsTopicId string
    cls topic.
    dropInvalidMessageToCls boolean
    Whether to deliver to cls.
    drop_cls_log_set str
    cls LogSet id.
    drop_cls_owneruin str
    account.
    drop_cls_region str
    The region where the cls is delivered.
    drop_cls_topic_id str
    cls topic.
    drop_invalid_message_to_cls bool
    Whether to deliver to cls.
    dropClsLogSet String
    cls LogSet id.
    dropClsOwneruin String
    account.
    dropClsRegion String
    The region where the cls is delivered.
    dropClsTopicId String
    cls topic.
    dropInvalidMessageToCls Boolean
    Whether to deliver to cls.
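
    A minimal Python sketch of a drop-to-CLS policy using the fields above (all IDs and the region are placeholders):

    import pulumi_tencentcloud as tencentcloud

    # Deliver messages that fail to parse to a CLS topic instead of
    # discarding them silently (all identifiers are placeholders).
    drop_cls = tencentcloud.CkafkaDatahubTaskSourceResourceClickHouseParamDropClsArgs(
        drop_invalid_message_to_cls=True,
        drop_cls_region="ap-guangzhou",
        drop_cls_log_set="logset-id",
        drop_cls_topic_id="cls-topic-id",
        drop_cls_owneruin="100000000001",
    )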

    CkafkaDatahubTaskSourceResourceClickHouseParamSchema, CkafkaDatahubTaskSourceResourceClickHouseParamSchemaArgs

    AllowNull bool
    Whether the column item is allowed to be empty.
    ColumnName string
    column name.
    JsonKey string
    The json Key name corresponding to this column.
    Type string
    type of table column.
    AllowNull bool
    Whether the column item is allowed to be empty.
    ColumnName string
    column name.
    JsonKey string
    The json Key name corresponding to this column.
    Type string
    type of table column.
    allowNull Boolean
    Whether the column item is allowed to be empty.
    columnName String
    column name.
    jsonKey String
    The json Key name corresponding to this column.
    type String
    type of table column.
    allowNull boolean
    Whether the column item is allowed to be empty.
    columnName string
    column name.
    jsonKey string
    The json Key name corresponding to this column.
    type string
    type of table column.
    allow_null bool
    Whether the column item is allowed to be empty.
    column_name str
    column name.
    json_key str
    The json Key name corresponding to this column.
    type str
    type of table column.
    allowNull Boolean
    Whether the column item is allowed to be empty.
    columnName String
    column name.
    jsonKey String
    The json Key name corresponding to this column.
    type String
    type of table column.

    CkafkaDatahubTaskSourceResourceClsParam, CkafkaDatahubTaskSourceResourceClsParamArgs

    DecodeJson bool
    Whether the produced information is in json format.
    Resource string
    cls id.
    ContentKey string
    Required when Decode Json is false.
    LogSet string
    LogSet id.
    TimeField string
    Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.
    DecodeJson bool
    Whether the produced information is in json format.
    Resource string
    cls id.
    ContentKey string
    Required when Decode Json is false.
    LogSet string
    LogSet id.
    TimeField string
    Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.
    decodeJson Boolean
    Whether the produced information is in json format.
    resource String
    cls id.
    contentKey String
    Required when Decode Json is false.
    logSet String
    LogSet id.
    timeField String
    Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.
    decodeJson boolean
    Whether the produced information is in json format.
    resource string
    cls id.
    contentKey string
    Required when Decode Json is false.
    logSet string
    LogSet id.
    timeField string
    Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.
    decode_json bool
    Whether the produced information is in json format.
    resource str
    cls id.
    content_key str
    Required when Decode Json is false.
    log_set str
    LogSet id.
    time_field str
    Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.
    decodeJson Boolean
    Whether the produced information is in json format.
    resource String
    cls id.
    contentKey String
    Required when Decode Json is false.
    logSet String
    LogSet id.
    timeField String
    Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.

    CkafkaDatahubTaskSourceResourceCosParam, CkafkaDatahubTaskSourceResourceCosParamArgs

    BucketName string
    cos bucket name.
    Region string
    region code.
    AggregateBatchSize double
    The size of aggregated messages, in MB.
    AggregateInterval double
    time interval.
    DirectoryTimeFormat string
    Partition format, following strptime time formatting.
    FormatOutputType string
    The file format after message aggregation: csv|json.
    ObjectKey string
    ObjectKey.
    ObjectKeyPrefix string
    Dumped object directory prefix.
    BucketName string
    cos bucket name.
    Region string
    region code.
    AggregateBatchSize float64
    The size of aggregated messages, in MB.
    AggregateInterval float64
    time interval.
    DirectoryTimeFormat string
    Partition format, following strptime time formatting.
    FormatOutputType string
    The file format after message aggregation: csv|json.
    ObjectKey string
    ObjectKey.
    ObjectKeyPrefix string
    Dumped object directory prefix.
    bucketName String
    cos bucket name.
    region String
    region code.
    aggregateBatchSize Double
    The size of aggregated messages, in MB.
    aggregateInterval Double
    time interval.
    directoryTimeFormat String
    Partition format, following strptime time formatting.
    formatOutputType String
    The file format after message aggregation: csv|json.
    objectKey String
    ObjectKey.
    objectKeyPrefix String
    Dumped object directory prefix.
    bucketName string
    cos bucket name.
    region string
    region code.
    aggregateBatchSize number
    The size of aggregated messages, in MB.
    aggregateInterval number
    time interval.
    directoryTimeFormat string
    Partition format, following strptime time formatting.
    formatOutputType string
    The file format after message aggregation: csv|json.
    objectKey string
    ObjectKey.
    objectKeyPrefix string
    Dumped object directory prefix.
    bucket_name str
    cos bucket name.
    region str
    region code.
    aggregate_batch_size float
    The size of aggregated messages, in MB.
    aggregate_interval float
    time interval.
    directory_time_format str
    Partition format, following strptime time formatting.
    format_output_type str
    The file format after message aggregation: csv|json.
    object_key str
    ObjectKey.
    object_key_prefix str
    Dumped object directory prefix.
    bucketName String
    cos bucket name.
    region String
    region code.
    aggregateBatchSize Number
    The size of aggregated messages, in MB.
    aggregateInterval Number
    time interval.
    directoryTimeFormat String
    Partition format, following strptime time formatting.
    formatOutputType String
    The file format after message aggregation: csv|json.
    objectKey String
    ObjectKey.
    objectKeyPrefix String
    Dumped object directory prefix.
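
    A minimal Python sketch of the aggregation settings above (bucket name and region are placeholders, and the interval units are assumed to be seconds):

    import pulumi_tencentcloud as tencentcloud

    # Aggregate messages into JSON objects in a COS bucket
    # (bucket name and region are placeholders).
    cos_param = tencentcloud.CkafkaDatahubTaskSourceResourceCosParamArgs(
        bucket_name="my-bucket-1250000000",
        region="ap-guangzhou",
        aggregate_batch_size=5,            # aggregate up to 5 MB per object
        aggregate_interval=60,             # flush interval (assumed seconds)
        format_output_type="json",         # csv|json
        directory_time_format="%Y-%m-%d",  # strptime-style partition format
    )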

    CkafkaDatahubTaskSourceResourceCtsdbParam, CkafkaDatahubTaskSourceResourceCtsdbParamArgs

    CtsdbMetric string
    Ctsdb metric.
    Resource string
    resource id.
    CtsdbMetric string
    Ctsdb metric.
    Resource string
    resource id.
    ctsdbMetric String
    Ctsdb metric.
    resource String
    resource id.
    ctsdbMetric string
    Ctsdb metric.
    resource string
    resource id.
    ctsdb_metric str
    Ctsdb metric.
    resource str
    resource id.
    ctsdbMetric String
    Ctsdb metric.
    resource String
    resource id.

    CkafkaDatahubTaskSourceResourceDtsParam, CkafkaDatahubTaskSourceResourceDtsParamArgs

    Resource string
    Dts instance Id.
    GroupId string
    Dts consumer group Id.
    GroupPassword string
    Dts consumer group passwd.
    GroupUser string
    Dts account.
    Ip string
    Dts connection ip.
    Port double
    Dts connection port.
    Topic string
    Dts topic.
    TranSql bool
    Whether to synchronize the parsed JSON-format data (true, the default) instead of the original raw data (false).
    Resource string
    Dts instance Id.
    GroupId string
    Dts consumer group Id.
    GroupPassword string
    Dts consumer group passwd.
    GroupUser string
    Dts account.
    Ip string
    Dts connection ip.
    Port float64
    Dts connection port.
    Topic string
    Dts topic.
    TranSql bool
    Whether to synchronize the parsed JSON-format data (true, the default) instead of the original raw data (false).
    resource String
    Dts instance Id.
    groupId String
    Dts consumer group Id.
    groupPassword String
    Dts consumer group passwd.
    groupUser String
    Dts account.
    ip String
    Dts connection ip.
    port Double
    Dts connection port.
    topic String
    Dts topic.
    tranSql Boolean
    Whether to synchronize the parsed JSON-format data (true, the default) instead of the original raw data (false).
    resource string
    Dts instance Id.
    groupId string
    Dts consumer group Id.
    groupPassword string
    Dts consumer group passwd.
    groupUser string
    Dts account.
    ip string
    Dts connection ip.
    port number
    Dts connection port.
    topic string
    Dts topic.
    tranSql boolean
    Whether to synchronize the parsed JSON-format data (true, the default) instead of the original raw data (false).
    resource str
    Dts instance Id.
    group_id str
    Dts consumer group Id.
    group_password str
    Dts consumer group passwd.
    group_user str
    Dts account.
    ip str
    Dts connection ip.
    port float
    Dts connection port.
    topic str
    Dts topic.
    tran_sql bool
    Whether to synchronize the parsed JSON-format data (true, the default) instead of the original raw data (false).
    resource String
    Dts instance Id.
    groupId String
    Dts consumer group Id.
    groupPassword String
    Dts consumer group passwd.
    groupUser String
    Dts account.
    ip String
    Dts connection ip.
    port Number
    Dts connection port.
    topic String
    Dts topic.
    tranSql Boolean
    Whether to synchronize the parsed JSON-format data (true, the default) instead of the original raw data (false).

    CkafkaDatahubTaskSourceResourceEsParam, CkafkaDatahubTaskSourceResourceEsParamArgs

    Resource string
    Resource.
    ContentKey string
    key for data in non-json format.
    DatabasePrimaryKey string
    When the message dumped to ES is a database binlog and you need to synchronize insert, delete, and update operations to ES, fill in the primary key of the database table.
    DateFormat string
    Es date suffix.
    DocumentIdField string
    The field name of the document ID value dumped into Es.
    DropCls CkafkaDatahubTaskSourceResourceEsParamDropCls
    When DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
    DropDlq CkafkaDatahubTaskSourceResourceEsParamDropDlq
    dead letter queue.
    DropInvalidJsonMessage bool
    Whether Es discards messages in non-json format.
    DropInvalidMessage bool
    Whether Es discards the message of parsing failure.
    Index string
    Es index name.
    IndexType string
    Es custom index name type, STRING, JSONPATH, the default is STRING.
    Password string
    Es Password.
    Port double
    Es connection port.
    SelfBuilt bool
    Whether it is a self-built cluster.
    ServiceVip string
    instance vip.
    UniqVpcId string
    instance vpc id.
    UserName string
    Es UserName.
    Resource string
    Resource.
    ContentKey string
    key for data in non-json format.
    DatabasePrimaryKey string
    When the message dumped to ES is a database binlog and you need to synchronize insert, delete, and update operations to ES, fill in the primary key of the database table.
    DateFormat string
    Es date suffix.
    DocumentIdField string
    The field name of the document ID value dumped into Es.
    DropCls CkafkaDatahubTaskSourceResourceEsParamDropCls
    When DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
    DropDlq CkafkaDatahubTaskSourceResourceEsParamDropDlq
    dead letter queue.
    DropInvalidJsonMessage bool
    Whether Es discards messages in non-json format.
    DropInvalidMessage bool
    Whether Es discards the message of parsing failure.
    Index string
    Es index name.
    IndexType string
    Es custom index name type, STRING, JSONPATH, the default is STRING.
    Password string
    Es Password.
    Port float64
    Es connection port.
    SelfBuilt bool
    Whether it is a self-built cluster.
    ServiceVip string
    instance vip.
    UniqVpcId string
    instance vpc id.
    UserName string
    Es UserName.
    resource String
    Resource.
    contentKey String
    key for data in non-json format.
    databasePrimaryKey String
    When the message dumped to ES is a database binlog and you need to synchronize insert, delete, and update operations to ES, fill in the primary key of the database table.
    dateFormat String
    Es date suffix.
    documentIdField String
    The field name of the document ID value dumped into Es.
    dropCls CkafkaDatahubTaskSourceResourceEsParamDropCls
    When DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
    dropDlq CkafkaDatahubTaskSourceResourceEsParamDropDlq
    dead letter queue.
    dropInvalidJsonMessage Boolean
    Whether Es discards messages in non-json format.
    dropInvalidMessage Boolean
    Whether Es discards the message of parsing failure.
    index String
    Es index name.
    indexType String
    Es custom index name type, STRING, JSONPATH, the default is STRING.
    password String
    Es Password.
    port Double
    Es connection port.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    serviceVip String
    instance vip.
    uniqVpcId String
    instance vpc id.
    userName String
    Es UserName.
    resource string
    Resource.
    contentKey string
    key for data in non-json format.
    databasePrimaryKey string
    When the message dumped to ES is a database binlog and you need to synchronize insert, delete, and update operations to ES, fill in the primary key of the database table.
    dateFormat string
    Es date suffix.
    documentIdField string
    The field name of the document ID value dumped into Es.
    dropCls CkafkaDatahubTaskSourceResourceEsParamDropCls
    When DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
    dropDlq CkafkaDatahubTaskSourceResourceEsParamDropDlq
    dead letter queue.
    dropInvalidJsonMessage boolean
    Whether Es discards messages in non-json format.
    dropInvalidMessage boolean
    Whether Es discards the message of parsing failure.
    index string
    Es index name.
    indexType string
    Es custom index name type, STRING, JSONPATH, the default is STRING.
    password string
    Es Password.
    port number
    Es connection port.
    selfBuilt boolean
    Whether it is a self-built cluster.
    serviceVip string
    instance vip.
    uniqVpcId string
    instance vpc id.
    userName string
    Es UserName.
    resource str
    Resource.
    content_key str
    key for data in non-json format.
    database_primary_key str
    When the message dumped to ES is a database binlog and you need to synchronize insert, delete, and update operations to ES, fill in the primary key of the database table.
    date_format str
    Es date suffix.
    document_id_field str
    The field name of the document ID value dumped into Es.
    drop_cls CkafkaDatahubTaskSourceResourceEsParamDropCls
    When DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
    drop_dlq CkafkaDatahubTaskSourceResourceEsParamDropDlq
    dead letter queue.
    drop_invalid_json_message bool
    Whether Es discards messages in non-json format.
    drop_invalid_message bool
    Whether Es discards the message of parsing failure.
    index str
    Es index name.
    index_type str
    Es custom index name type, STRING, JSONPATH, the default is STRING.
    password str
    Es Password.
    port float
    Es connection port.
    self_built bool
    Whether it is a self-built cluster.
    service_vip str
    instance vip.
    uniq_vpc_id str
    instance vpc id.
    user_name str
    Es UserName.
    resource String
    Resource.
    contentKey String
    key for data in non-json format.
    databasePrimaryKey String
    When the message dumped to ES is a database binlog and you need to synchronize insert, delete, and update operations to ES, fill in the primary key of the database table.
    dateFormat String
    Es date suffix.
    documentIdField String
    The field name of the document ID value dumped into Es.
    dropCls Property Map
    When DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
    dropDlq Property Map
    dead letter queue.
    dropInvalidJsonMessage Boolean
    Whether Es discards messages in non-json format.
    dropInvalidMessage Boolean
    Whether Es discards the message of parsing failure.
    index String
    Es index name.
    indexType String
    Es custom index name type, STRING, JSONPATH, the default is STRING.
    password String
    Es Password.
    port Number
    Es connection port.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    serviceVip String
    instance vip.
    uniqVpcId String
    instance vpc id.
    userName String
    Es UserName.
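
    A minimal Python sketch of the Es settings above (the ES instance ID and index name are placeholders):

    import pulumi_tencentcloud as tencentcloud

    # Write to a fixed-name index and drop messages that fail to parse
    # (instance ID and index name are placeholders).
    es_param = tencentcloud.CkafkaDatahubTaskSourceResourceEsParamArgs(
        resource="es-xxxxxxxx",
        index="ckafka-sink-index",
        index_type="STRING",             # STRING or JSONPATH; default STRING
        drop_invalid_message=True,       # discard messages that fail to parse
        drop_invalid_json_message=True,  # discard non-JSON messages
    )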

    CkafkaDatahubTaskSourceResourceEsParamDropCls, CkafkaDatahubTaskSourceResourceEsParamDropClsArgs

    DropClsLogSet string
    CLS logset ID.
    DropClsOwneruin string
    Account UIN of the CLS resource owner.
    DropClsRegion string
    The region to which CLS messages are delivered.
    DropClsTopicId string
    CLS topic ID.
    DropInvalidMessageToCls bool
    Whether to deliver invalid messages to CLS.
    DropClsLogSet string
    CLS logset ID.
    DropClsOwneruin string
    Account UIN of the CLS resource owner.
    DropClsRegion string
    The region to which CLS messages are delivered.
    DropClsTopicId string
    CLS topic ID.
    DropInvalidMessageToCls bool
    Whether to deliver invalid messages to CLS.
    dropClsLogSet String
    CLS logset ID.
    dropClsOwneruin String
    Account UIN of the CLS resource owner.
    dropClsRegion String
    The region to which CLS messages are delivered.
    dropClsTopicId String
    CLS topic ID.
    dropInvalidMessageToCls Boolean
    Whether to deliver invalid messages to CLS.
    dropClsLogSet string
    CLS logset ID.
    dropClsOwneruin string
    Account UIN of the CLS resource owner.
    dropClsRegion string
    The region to which CLS messages are delivered.
    dropClsTopicId string
    CLS topic ID.
    dropInvalidMessageToCls boolean
    Whether to deliver invalid messages to CLS.
    drop_cls_log_set str
    CLS logset ID.
    drop_cls_owneruin str
    Account UIN of the CLS resource owner.
    drop_cls_region str
    The region to which CLS messages are delivered.
    drop_cls_topic_id str
    CLS topic ID.
    drop_invalid_message_to_cls bool
    Whether to deliver invalid messages to CLS.
    dropClsLogSet String
    CLS logset ID.
    dropClsOwneruin String
    Account UIN of the CLS resource owner.
    dropClsRegion String
    The region to which CLS messages are delivered.
    dropClsTopicId String
    CLS topic ID.
    dropInvalidMessageToCls Boolean
    Whether to deliver invalid messages to CLS.
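
    As a hedged TypeScript sketch, a dropCls block that delivers dropped messages to CLS might look like the following; the UIN, logset, and topic IDs are placeholders.

    // Hypothetical sketch: deliver invalid messages to CLS (all IDs are placeholders).
    const dropCls = {
        dropInvalidMessageToCls: true,    // enable delivery to CLS
        dropClsRegion: "ap-guangzhou",    // region of the CLS resource
        dropClsOwneruin: "100000000001",  // placeholder owner account UIN
        dropClsLogSet: "logset-xxxxxxxx", // placeholder CLS logset ID
        dropClsTopicId: "topic-xxxxxxxx", // placeholder CLS topic ID
    };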

    CkafkaDatahubTaskSourceResourceEsParamDropDlq, CkafkaDatahubTaskSourceResourceEsParamDropDlqArgs

    Type string
    Type. Valid values: DLQ (dead letter queue), IGNORE_ERROR, DROP.
    DlqType string
    DLQ type. Valid values: CKAFKA, TOPIC.
    KafkaParam CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParam
    CKafka-type DLQ.
    MaxRetryAttempts double
    Maximum number of retries.
    RetryInterval double
    Retry interval.
    TopicParam CkafkaDatahubTaskSourceResourceEsParamDropDlqTopicParam
    DIP topic-type dead letter queue.
    Type string
    Type. Valid values: DLQ (dead letter queue), IGNORE_ERROR, DROP.
    DlqType string
    DLQ type. Valid values: CKAFKA, TOPIC.
    KafkaParam CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParam
    CKafka-type DLQ.
    MaxRetryAttempts float64
    Maximum number of retries.
    RetryInterval float64
    Retry interval.
    TopicParam CkafkaDatahubTaskSourceResourceEsParamDropDlqTopicParam
    DIP topic-type dead letter queue.
    type String
    Type. Valid values: DLQ (dead letter queue), IGNORE_ERROR, DROP.
    dlqType String
    DLQ type. Valid values: CKAFKA, TOPIC.
    kafkaParam CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParam
    CKafka-type DLQ.
    maxRetryAttempts Double
    Maximum number of retries.
    retryInterval Double
    Retry interval.
    topicParam CkafkaDatahubTaskSourceResourceEsParamDropDlqTopicParam
    DIP topic-type dead letter queue.
    type string
    Type. Valid values: DLQ (dead letter queue), IGNORE_ERROR, DROP.
    dlqType string
    DLQ type. Valid values: CKAFKA, TOPIC.
    kafkaParam CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParam
    CKafka-type DLQ.
    maxRetryAttempts number
    Maximum number of retries.
    retryInterval number
    Retry interval.
    topicParam CkafkaDatahubTaskSourceResourceEsParamDropDlqTopicParam
    DIP topic-type dead letter queue.
    type str
    Type. Valid values: DLQ (dead letter queue), IGNORE_ERROR, DROP.
    dlq_type str
    DLQ type. Valid values: CKAFKA, TOPIC.
    kafka_param CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParam
    CKafka-type DLQ.
    max_retry_attempts float
    Maximum number of retries.
    retry_interval float
    Retry interval.
    topic_param CkafkaDatahubTaskSourceResourceEsParamDropDlqTopicParam
    DIP topic-type dead letter queue.
    type String
    Type. Valid values: DLQ (dead letter queue), IGNORE_ERROR, DROP.
    dlqType String
    DLQ type. Valid values: CKAFKA, TOPIC.
    kafkaParam Property Map
    CKafka-type DLQ.
    maxRetryAttempts Number
    Maximum number of retries.
    retryInterval Number
    Retry interval.
    topicParam Property Map
    DIP topic-type dead letter queue.
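
    A minimal TypeScript sketch of a dropDlq block backed by a CKafka dead letter queue follows; the instance ID and topic name are placeholders.

    // Hypothetical sketch: CKafka-backed dead letter queue with retries.
    const dropDlq = {
        type: "DLQ",                      // DLQ, IGNORE_ERROR, or DROP
        dlqType: "CKAFKA",                // CKAFKA or TOPIC
        maxRetryAttempts: 3,              // maximum number of retries
        retryInterval: 60,                // retry interval
        kafkaParam: {
            resource: "ckafka-xxxxxxxx",  // placeholder CKafka instance ID
            topic: "dead-letter-topic",   // placeholder topic name
            selfBuilt: false,
        },
    };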

    CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParam, CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParamArgs

    Resource string
    Instance resource.
    SelfBuilt bool
    Whether the cluster is self-built rather than a cloud product.
    CompressionType string
    Whether to compress when writing to the topic: fill in none to disable or open to enable.
    EnableToleration bool
    Whether to enable the dead letter queue.
    MsgMultiple double
    Each source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    OffsetType string
    Offset type: earliest (from beginning), latest (from latest), or timestamp (from a specific time).
    PartitionNum double
    Number of partitions of the topic.
    QpsLimit double
    QPS (queries per second) limit.
    ResourceName string
    Instance name.
    StartTime double
    Start time; required when OffsetType is timestamp.
    TableMappings List<CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParamTableMapping>
    Maps of table to topic; required when multi-topic distribution is selected.
    Topic string
    Topic name.
    TopicId string
    Topic ID.
    UseAutoCreateTopic bool
    Whether the topic should be automatically created (currently only supported for SOURCE inflow tasks; if not distributing to multiple topics, fill in the name of the topic to auto-create in the Topic field).
    UseTableMapping bool
    Whether to use multi-table mapping.
    ZoneId double
    Zone ID.
    Resource string
    Instance resource.
    SelfBuilt bool
    Whether the cluster is self-built rather than a cloud product.
    CompressionType string
    Whether to compress when writing to the topic: fill in none to disable or open to enable.
    EnableToleration bool
    Whether to enable the dead letter queue.
    MsgMultiple float64
    Each source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    OffsetType string
    Offset type: earliest (from beginning), latest (from latest), or timestamp (from a specific time).
    PartitionNum float64
    Number of partitions of the topic.
    QpsLimit float64
    QPS (queries per second) limit.
    ResourceName string
    Instance name.
    StartTime float64
    Start time; required when OffsetType is timestamp.
    TableMappings []CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParamTableMapping
    Maps of table to topic; required when multi-topic distribution is selected.
    Topic string
    Topic name.
    TopicId string
    Topic ID.
    UseAutoCreateTopic bool
    Whether the topic should be automatically created (currently only supported for SOURCE inflow tasks; if not distributing to multiple topics, fill in the name of the topic to auto-create in the Topic field).
    UseTableMapping bool
    Whether to use multi-table mapping.
    ZoneId float64
    Zone ID.
    resource String
    Instance resource.
    selfBuilt Boolean
    Whether the cluster is self-built rather than a cloud product.
    compressionType String
    Whether to compress when writing to the topic: fill in none to disable or open to enable.
    enableToleration Boolean
    Whether to enable the dead letter queue.
    msgMultiple Double
    Each source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType String
    Offset type: earliest (from beginning), latest (from latest), or timestamp (from a specific time).
    partitionNum Double
    Number of partitions of the topic.
    qpsLimit Double
    QPS (queries per second) limit.
    resourceName String
    Instance name.
    startTime Double
    Start time; required when OffsetType is timestamp.
    tableMappings List<CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParamTableMapping>
    Maps of table to topic; required when multi-topic distribution is selected.
    topic String
    Topic name.
    topicId String
    Topic ID.
    useAutoCreateTopic Boolean
    Whether the topic should be automatically created (currently only supported for SOURCE inflow tasks; if not distributing to multiple topics, fill in the name of the topic to auto-create in the Topic field).
    useTableMapping Boolean
    Whether to use multi-table mapping.
    zoneId Double
    Zone ID.
    resource string
    Instance resource.
    selfBuilt boolean
    Whether the cluster is self-built rather than a cloud product.
    compressionType string
    Whether to compress when writing to the topic: fill in none to disable or open to enable.
    enableToleration boolean
    Whether to enable the dead letter queue.
    msgMultiple number
    Each source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType string
    Offset type: earliest (from beginning), latest (from latest), or timestamp (from a specific time).
    partitionNum number
    Number of partitions of the topic.
    qpsLimit number
    QPS (queries per second) limit.
    resourceName string
    Instance name.
    startTime number
    Start time; required when OffsetType is timestamp.
    tableMappings CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParamTableMapping[]
    Maps of table to topic; required when multi-topic distribution is selected.
    topic string
    Topic name.
    topicId string
    Topic ID.
    useAutoCreateTopic boolean
    Whether the topic should be automatically created (currently only supported for SOURCE inflow tasks; if not distributing to multiple topics, fill in the name of the topic to auto-create in the Topic field).
    useTableMapping boolean
    Whether to use multi-table mapping.
    zoneId number
    Zone ID.
    resource str
    Instance resource.
    self_built bool
    Whether the cluster is self-built rather than a cloud product.
    compression_type str
    Whether to compress when writing to the topic: fill in none to disable or open to enable.
    enable_toleration bool
    Whether to enable the dead letter queue.
    msg_multiple float
    Each source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offset_type str
    Offset type: earliest (from beginning), latest (from latest), or timestamp (from a specific time).
    partition_num float
    Number of partitions of the topic.
    qps_limit float
    QPS (queries per second) limit.
    resource_name str
    Instance name.
    start_time float
    Start time; required when OffsetType is timestamp.
    table_mappings Sequence[CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParamTableMapping]
    Maps of table to topic; required when multi-topic distribution is selected.
    topic str
    Topic name.
    topic_id str
    Topic ID.
    use_auto_create_topic bool
    Whether the topic should be automatically created (currently only supported for SOURCE inflow tasks; if not distributing to multiple topics, fill in the name of the topic to auto-create in the Topic field).
    use_table_mapping bool
    Whether to use multi-table mapping.
    zone_id float
    Zone ID.
    resource String
    Instance resource.
    selfBuilt Boolean
    Whether the cluster is self-built rather than a cloud product.
    compressionType String
    Whether to compress when writing to the topic: fill in none to disable or open to enable.
    enableToleration Boolean
    Whether to enable the dead letter queue.
    msgMultiple Number
    Each source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType String
    Offset type: earliest (from beginning), latest (from latest), or timestamp (from a specific time).
    partitionNum Number
    Number of partitions of the topic.
    qpsLimit Number
    QPS (queries per second) limit.
    resourceName String
    Instance name.
    startTime Number
    Start time; required when OffsetType is timestamp.
    tableMappings List<Property Map>
    Maps of table to topic; required when multi-topic distribution is selected.
    topic String
    Topic name.
    topicId String
    Topic ID.
    useAutoCreateTopic Boolean
    Whether the topic should be automatically created (currently only supported for SOURCE inflow tasks; if not distributing to multiple topics, fill in the name of the topic to auto-create in the Topic field).
    useTableMapping Boolean
    Whether to use multi-table mapping.
    zoneId Number
    Zone ID.
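
    The sketch below illustrates the kafkaParam shape when consuming from a specific point in time; every ID and the timestamp are hypothetical.

    // Hypothetical sketch: consume from a timestamp.
    const kafkaParam = {
        resource: "ckafka-xxxxxxxx",  // placeholder instance ID
        selfBuilt: false,
        topic: "orders-topic",        // placeholder topic name
        offsetType: "timestamp",      // earliest | latest | timestamp
        startTime: 1735689600,        // required when offsetType is "timestamp"
        partitionNum: 3,              // number of partitions
        qpsLimit: 1000,               // QPS limit
        useAutoCreateTopic: false,
    };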

    CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParamTableMapping, CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParamTableMappingArgs

    Database string
    Database name.
    Table string
    Table name; use a comma (,) to separate multiple tables.
    Topic string
    Topic name.
    TopicId string
    Topic ID.
    Database string
    Database name.
    Table string
    Table name; use a comma (,) to separate multiple tables.
    Topic string
    Topic name.
    TopicId string
    Topic ID.
    database String
    Database name.
    table String
    Table name; use a comma (,) to separate multiple tables.
    topic String
    Topic name.
    topicId String
    Topic ID.
    database string
    Database name.
    table string
    Table name; use a comma (,) to separate multiple tables.
    topic string
    Topic name.
    topicId string
    Topic ID.
    database str
    Database name.
    table str
    Table name; use a comma (,) to separate multiple tables.
    topic str
    Topic name.
    topic_id str
    Topic ID.
    database String
    Database name.
    table String
    Table name; use a comma (,) to separate multiple tables.
    topic String
    Topic name.
    topicId String
    Topic ID.
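
    One tableMappings entry, sketched in TypeScript with placeholder names; note that table accepts a comma-separated list.

    // Hypothetical table-to-topic mapping entry.
    const tableMapping = {
        database: "app_db",            // database name (placeholder)
        table: "orders,order_items",   // comma-separated table names
        topic: "app-db-changes",       // target topic name (placeholder)
        topicId: "topic-xxxxxxxx",     // placeholder topic ID
    };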

    CkafkaDatahubTaskSourceResourceEsParamDropDlqTopicParam, CkafkaDatahubTaskSourceResourceEsParamDropDlqTopicParamArgs

    Resource string
    The name of the topic (topic sold separately).
    CompressionType string
    Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
    MsgMultiple double
    Each source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    OffsetType string
    Offset type: earliest (initial position), latest (latest position), or timestamp (a point in time).
    StartTime double
    Required when OffsetType is timestamp; pass a timestamp accurate to the second.
    TopicId string
    Topic ID.
    UseAutoCreateTopic bool
    Whether the topic should be automatically created (currently only supported for SOURCE inflow tasks).
    Resource string
    The name of the topic (topic sold separately).
    CompressionType string
    Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
    MsgMultiple float64
    Each source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    OffsetType string
    Offset type: earliest (initial position), latest (latest position), or timestamp (a point in time).
    StartTime float64
    Required when OffsetType is timestamp; pass a timestamp accurate to the second.
    TopicId string
    Topic ID.
    UseAutoCreateTopic bool
    Whether the topic should be automatically created (currently only supported for SOURCE inflow tasks).
    resource String
    The name of the topic (topic sold separately).
    compressionType String
    Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
    msgMultiple Double
    Each source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType String
    Offset type: earliest (initial position), latest (latest position), or timestamp (a point in time).
    startTime Double
    Required when OffsetType is timestamp; pass a timestamp accurate to the second.
    topicId String
    Topic ID.
    useAutoCreateTopic Boolean
    Whether the topic should be automatically created (currently only supported for SOURCE inflow tasks).
    resource string
    The name of the topic (topic sold separately).
    compressionType string
    Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
    msgMultiple number
    Each source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType string
    Offset type: earliest (initial position), latest (latest position), or timestamp (a point in time).
    startTime number
    Required when OffsetType is timestamp; pass a timestamp accurate to the second.
    topicId string
    Topic ID.
    useAutoCreateTopic boolean
    Whether the topic should be automatically created (currently only supported for SOURCE inflow tasks).
    resource str
    The name of the topic (topic sold separately).
    compression_type str
    Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
    msg_multiple float
    Each source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offset_type str
    Offset type: earliest (initial position), latest (latest position), or timestamp (a point in time).
    start_time float
    Required when OffsetType is timestamp; pass a timestamp accurate to the second.
    topic_id str
    Topic ID.
    use_auto_create_topic bool
    Whether the topic should be automatically created (currently only supported for SOURCE inflow tasks).
    resource String
    The name of the topic (topic sold separately).
    compressionType String
    Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
    msgMultiple Number
    Each source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType String
    Offset type: earliest (initial position), latest (latest position), or timestamp (a point in time).
    startTime Number
    Required when OffsetType is timestamp; pass a timestamp accurate to the second.
    topicId String
    Topic ID.
    useAutoCreateTopic Boolean
    Whether the topic should be automatically created (currently only supported for SOURCE inflow tasks).
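
    A TypeScript sketch of a topicParam dead letter queue follows; the topic name and start time are placeholders.

    // Hypothetical sketch: DIP topic-type dead letter queue.
    const topicParam = {
        resource: "1300000000-dlq-topic",  // placeholder topic name
        compressionType: "lz4",            // none, or one of gzip, snappy, lz4
        offsetType: "timestamp",           // earliest | latest | timestamp
        startTime: 1735689600,             // seconds precision, required for timestamp
        useAutoCreateTopic: false,
    };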

    CkafkaDatahubTaskSourceResourceEventBusParam, CkafkaDatahubTaskSourceResourceEventBusParamArgs

    Resource string
    Instance ID.
    SelfBuilt bool
    Whether it is a self-built cluster.
    Type string
    Resource type: EB_COS, EB_ES, or EB_CLS.
    FunctionName string
    SCF function name.
    Namespace string
    SCF namespace.
    Qualifier string
    SCF version or alias.
    Resource string
    Instance ID.
    SelfBuilt bool
    Whether it is a self-built cluster.
    Type string
    Resource type: EB_COS, EB_ES, or EB_CLS.
    FunctionName string
    SCF function name.
    Namespace string
    SCF namespace.
    Qualifier string
    SCF version or alias.
    resource String
    Instance ID.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    type String
    Resource type: EB_COS, EB_ES, or EB_CLS.
    functionName String
    SCF function name.
    namespace String
    SCF namespace.
    qualifier String
    SCF version or alias.
    resource string
    Instance ID.
    selfBuilt boolean
    Whether it is a self-built cluster.
    type string
    Resource type: EB_COS, EB_ES, or EB_CLS.
    functionName string
    SCF function name.
    namespace string
    SCF namespace.
    qualifier string
    SCF version or alias.
    resource str
    Instance ID.
    self_built bool
    Whether it is a self-built cluster.
    type str
    Resource type: EB_COS, EB_ES, or EB_CLS.
    function_name str
    SCF function name.
    namespace str
    SCF namespace.
    qualifier str
    SCF version or alias.
    resource String
    Instance ID.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    type String
    Resource type: EB_COS, EB_ES, or EB_CLS.
    functionName String
    SCF function name.
    namespace String
    SCF namespace.
    qualifier String
    SCF version or alias.
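
    As a sketch only, an eventBusParam block wired to an SCF function might look like this in TypeScript; the instance ID, namespace, function name, and qualifier are all placeholders, not verified values.

    // Hypothetical sketch of an eventBusParam block.
    const eventBusParam = {
        resource: "eb-xxxxxxxx",      // placeholder instance ID
        type: "EB_COS",               // EB_COS, EB_ES, or EB_CLS
        selfBuilt: false,
        namespace: "default",         // placeholder SCF namespace
        functionName: "my-handler",   // placeholder SCF function name
        qualifier: "$LATEST",         // placeholder SCF version/alias
    };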

    CkafkaDatahubTaskSourceResourceKafkaParam, CkafkaDatahubTaskSourceResourceKafkaParamArgs

    Resource string
    Instance resource.
    SelfBuilt bool
    Whether the cluster is self-built rather than a cloud product.
    CompressionType string
    Whether to compress when writing to the topic: fill in none to disable or open to enable.
    EnableToleration bool
    Whether to enable the dead letter queue.
    MsgMultiple double
    Each source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    OffsetType string
    Offset type: earliest (from beginning), latest (from latest), or timestamp (from a specific time).
    PartitionNum double
    Number of partitions of the topic.
    QpsLimit double
    QPS (queries per second) limit.
    ResourceName string
    Instance name.
    StartTime double
    Start time; required when OffsetType is timestamp.
    TableMappings List<CkafkaDatahubTaskSourceResourceKafkaParamTableMapping>
    Maps of table to topic; required when multi-topic distribution is selected.
    Topic string
    Topic name.
    TopicId string
    Topic ID.
    UseAutoCreateTopic bool
    Whether the topic should be automatically created (currently only supported for SOURCE inflow tasks; if not distributing to multiple topics, fill in the name of the topic to auto-create in the Topic field).
    UseTableMapping bool
    Whether to use multi-table mapping.
    ZoneId double
    Zone ID.
    Resource string
    Instance resource.
    SelfBuilt bool
    Whether the cluster is self-built rather than a cloud product.
    CompressionType string
    Whether to compress when writing to the topic: fill in none to disable or open to enable.
    EnableToleration bool
    Whether to enable the dead letter queue.
    MsgMultiple float64
    Each source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    OffsetType string
    Offset type: earliest (from beginning), latest (from latest), or timestamp (from a specific time).
    PartitionNum float64
    Number of partitions of the topic.
    QpsLimit float64
    QPS (queries per second) limit.
    ResourceName string
    Instance name.
    StartTime float64
    Start time; required when OffsetType is timestamp.
    TableMappings []CkafkaDatahubTaskSourceResourceKafkaParamTableMapping
    Maps of table to topic; required when multi-topic distribution is selected.
    Topic string
    Topic name.
    TopicId string
    Topic ID.
    UseAutoCreateTopic bool
    Whether the topic should be automatically created (currently only supported for SOURCE inflow tasks; if not distributing to multiple topics, fill in the name of the topic to auto-create in the Topic field).
    UseTableMapping bool
    Whether to use multi-table mapping.
    ZoneId float64
    Zone ID.
    resource String
    Instance resource.
    selfBuilt Boolean
    Whether the cluster is self-built rather than a cloud product.
    compressionType String
    Whether to compress when writing to the topic: fill in none to disable or open to enable.
    enableToleration Boolean
    Whether to enable the dead letter queue.
    msgMultiple Double
    Each source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType String
    Offset type: earliest (from beginning), latest (from latest), or timestamp (from a specific time).
    partitionNum Double
    Number of partitions of the topic.
    qpsLimit Double
    QPS (queries per second) limit.
    resourceName String
    Instance name.
    startTime Double
    Start time; required when OffsetType is timestamp.
    tableMappings List<CkafkaDatahubTaskSourceResourceKafkaParamTableMapping>
    Maps of table to topic; required when multi-topic distribution is selected.
    topic String
    Topic name.
    topicId String
    Topic ID.
    useAutoCreateTopic Boolean
    Whether the topic should be automatically created (currently only supported for SOURCE inflow tasks; if not distributing to multiple topics, fill in the name of the topic to auto-create in the Topic field).
    useTableMapping Boolean
    Whether to use multi-table mapping.
    zoneId Double
    Zone ID.
    resource string
    Instance resource.
    selfBuilt boolean
    Whether the cluster is self-built rather than a cloud product.
    compressionType string
    Whether to compress when writing to the topic: fill in none to disable or open to enable.
    enableToleration boolean
    Whether to enable the dead letter queue.
    msgMultiple number
    Each source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType string
    Offset type: earliest (from beginning), latest (from latest), or timestamp (from a specific time).
    partitionNum number
    Number of partitions of the topic.
    qpsLimit number
    QPS (queries per second) limit.
    resourceName string
    Instance name.
    startTime number
    Start time; required when OffsetType is timestamp.
    tableMappings CkafkaDatahubTaskSourceResourceKafkaParamTableMapping[]
    Maps of table to topic; required when multi-topic distribution is selected.
    topic string
    Topic name.
    topicId string
    Topic ID.
    useAutoCreateTopic boolean
    Whether the topic should be automatically created (currently only supported for SOURCE inflow tasks; if not distributing to multiple topics, fill in the name of the topic to auto-create in the Topic field).
    useTableMapping boolean
    Whether to use multi-table mapping.
    zoneId number
    Zone ID.
    resource str
    Instance resource.
    self_built bool
    Whether the cluster is self-built rather than a cloud product.
    compression_type str
    Whether to compress when writing to the topic: fill in none to disable or open to enable.
    enable_toleration bool
    Whether to enable the dead letter queue.
    msg_multiple float
    Each source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offset_type str
    Offset type: earliest (from beginning), latest (from latest), or timestamp (from a specific time).
    partition_num float
    Number of partitions of the topic.
    qps_limit float
    QPS (queries per second) limit.
    resource_name str
    Instance name.
    start_time float
    Start time; required when OffsetType is timestamp.
    table_mappings Sequence[CkafkaDatahubTaskSourceResourceKafkaParamTableMapping]
    Maps of table to topic; required when multi-topic distribution is selected.
    topic str
    Topic name.
    topic_id str
    Topic ID.
    use_auto_create_topic bool
    Whether the topic should be automatically created (currently only supported for SOURCE inflow tasks; if not distributing to multiple topics, fill in the name of the topic to auto-create in the Topic field).
    use_table_mapping bool
    Whether to use multi-table mapping.
    zone_id float
    Zone ID.
    resource String
    Instance resource.
    selfBuilt Boolean
    Whether the cluster is self-built rather than a cloud product.
    compressionType String
    Whether to compress when writing to the topic: fill in none to disable or open to enable.
    enableToleration Boolean
    Whether to enable the dead letter queue.
    msgMultiple Number
    Each source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType String
    Offset type: earliest (from beginning), latest (from latest), or timestamp (from a specific time).
    partitionNum Number
    Number of partitions of the topic.
    qpsLimit Number
    QPS (queries per second) limit.
    resourceName String
    Instance name.
    startTime Number
    Start time; required when OffsetType is timestamp.
    tableMappings List<Property Map>
    Maps of table to topic; required when multi-topic distribution is selected.
    topic String
    Topic name.
    topicId String
    Topic ID.
    useAutoCreateTopic Boolean
    Whether the topic should be automatically created (currently only supported for SOURCE inflow tasks; if not distributing to multiple topics, fill in the name of the topic to auto-create in the Topic field).
    useTableMapping Boolean
    Whether to use multi-table mapping.
    zoneId Number
    Zone ID.
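
    These fields mirror the dead-letter-queue kafkaParam above, so the sketch below instead shows the multi-table case: useTableMapping with two hypothetical tableMappings entries.

    // Hypothetical sketch: distribute two tables across two topics.
    const sourceKafkaParam = {
        resource: "ckafka-xxxxxxxx",  // placeholder instance ID
        selfBuilt: false,
        offsetType: "earliest",       // start from the beginning
        useTableMapping: true,        // enable table-to-topic mapping
        tableMappings: [
            { database: "app_db", table: "orders", topic: "orders-topic", topicId: "topic-aaaaaaaa" },
            { database: "app_db", table: "users", topic: "users-topic", topicId: "topic-bbbbbbbb" },
        ],
    };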

    CkafkaDatahubTaskSourceResourceKafkaParamTableMapping, CkafkaDatahubTaskSourceResourceKafkaParamTableMappingArgs

    Database string
    Database name.
    Table string
    Table name; use a comma (,) to separate multiple tables.
    Topic string
    Topic name.
    TopicId string
    Topic ID.
    Database string
    Database name.
    Table string
    Table name; use a comma (,) to separate multiple tables.
    Topic string
    Topic name.
    TopicId string
    Topic ID.
    database String
    Database name.
    table String
    Table name; use a comma (,) to separate multiple tables.
    topic String
    Topic name.
    topicId String
    Topic ID.
    database string
    Database name.
    table string
    Table name; use a comma (,) to separate multiple tables.
    topic string
    Topic name.
    topicId string
    Topic ID.
    database str
    Database name.
    table str
    Table name; use a comma (,) to separate multiple tables.
    topic str
    Topic name.
    topic_id str
    Topic ID.
    database String
    Database name.
    table String
    Table name; use a comma (,) to separate multiple tables.
    topic String
    Topic name.
    topicId String
    Topic ID.

    CkafkaDatahubTaskSourceResourceMariaDbParam, CkafkaDatahubTaskSourceResourceMariaDbParamArgs

    Database string
    MariaDB database name; * means all databases.
    Resource string
    MariaDB connection ID.
    Table string
    MariaDB table name. * matches all non-system tables in the monitored databases; use a comma (,) to monitor multiple tables, each filled in the format database name.table name.
    IncludeContentChanges string
    If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written.
    IncludeQuery bool
    If true, and the binlog_rows_query_log_events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if false, it does not.
    IsTablePrefix bool
    Set to true when the Table input is a prefix; otherwise false.
    KeyColumns string
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables not specified default to the table's primary key.
    OutputFormat string
    Output format: DEFAULT, CANAL_1, or CANAL_2.
    RecordWithSchema bool
    If true, the message carries the schema corresponding to the message structure; if false, it does not.
    SnapshotMode string
    Snapshot mode: schema_only or initial; defaults to initial.
    Database string
    MariaDB database name; * means all databases.
    Resource string
    MariaDB connection ID.
    Table string
    MariaDB table name. * matches all non-system tables in the monitored databases; use a comma (,) to monitor multiple tables, each filled in the format database name.table name.
    IncludeContentChanges string
    If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written.
    IncludeQuery bool
    If true, and the binlog_rows_query_log_events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if false, it does not.
    IsTablePrefix bool
    Set to true when the Table input is a prefix; otherwise false.
    KeyColumns string
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables not specified default to the table's primary key.
    OutputFormat string
    Output format: DEFAULT, CANAL_1, or CANAL_2.
    RecordWithSchema bool
    If true, the message carries the schema corresponding to the message structure; if false, it does not.
    SnapshotMode string
    Snapshot mode: schema_only or initial; defaults to initial.
    database String
    MariaDB database name; * means all databases.
    resource String
    MariaDB connection ID.
    table String
    MariaDB table name. * matches all non-system tables in the monitored databases; use a comma (,) to monitor multiple tables, each filled in the format database name.table name.
    includeContentChanges String
    If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written.
    includeQuery Boolean
    If true, and the binlog_rows_query_log_events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if false, it does not.
    isTablePrefix Boolean
    Set to true when the Table input is a prefix; otherwise false.
    keyColumns String
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables not specified default to the table's primary key.
    outputFormat String
    Output format: DEFAULT, CANAL_1, or CANAL_2.
    recordWithSchema Boolean
    If true, the message carries the schema corresponding to the message structure; if false, it does not.
    snapshotMode String
    Snapshot mode: schema_only or initial; defaults to initial.
    database string
    MariaDB database name; * means all databases.
    resource string
    MariaDB connection ID.
    table string
    MariaDB table name. * matches all non-system tables in the monitored databases; use a comma (,) to monitor multiple tables, each filled in the format database name.table name.
    includeContentChanges string
    If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written.
    includeQuery boolean
    If true, and the binlog_rows_query_log_events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if false, it does not.
    isTablePrefix boolean
    Set to true when the Table input is a prefix; otherwise false.
    keyColumns string
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables not specified default to the table's primary key.
    outputFormat string
    Output format: DEFAULT, CANAL_1, or CANAL_2.
    recordWithSchema boolean
    If true, the message carries the schema corresponding to the message structure; if false, it does not.
    snapshotMode string
    Snapshot mode: schema_only or initial; defaults to initial.
    database str
    MariaDB database name; * means all databases.
    resource str
    MariaDB connection ID.
    table str
    MariaDB table name. * matches all non-system tables in the monitored databases; use a comma (,) to monitor multiple tables, each filled in the format database name.table name.
    include_content_changes str
    If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written.
    include_query bool
    If true, and the binlog_rows_query_log_events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if false, it does not.
    is_table_prefix bool
    Set to true when the Table input is a prefix; otherwise false.
    key_columns str
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables not specified default to the table's primary key.
    output_format str
    Output format: DEFAULT, CANAL_1, or CANAL_2.
    record_with_schema bool
    If true, the message carries the schema corresponding to the message structure; if false, it does not.
    snapshot_mode str
    Snapshot mode: schema_only or initial; defaults to initial.
    database String
    MariaDB database name; * means all databases.
    resource String
    MariaDB connection ID.
    table String
    MariaDB table name. * matches all non-system tables in the monitored databases; use a comma (,) to monitor multiple tables, each filled in the format database name.table name.
    includeContentChanges String
    If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written.
    includeQuery Boolean
    If true, and the binlog_rows_query_log_events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if false, it does not.
    isTablePrefix Boolean
    Set to true when the Table input is a prefix; otherwise false.
    keyColumns String
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables not specified default to the table's primary key.
    outputFormat String
    Output format: DEFAULT, CANAL_1, or CANAL_2.
    recordWithSchema Boolean
    If true, the message carries the schema corresponding to the message structure; if false, it does not.
    snapshotMode String
    Snapshot mode: schema_only or initial; defaults to initial.
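
    The keyColumns format above is easiest to read in an example. Below is a hedged TypeScript sketch of a mariaDbParam block; the connection ID, database, and table names are placeholders.

    // Hypothetical sketch of a mariaDbParam block.
    const mariaDbParam = {
        resource: "resource-xxxxxxxx",         // placeholder MariaDB connection ID
        database: "shop",                      // placeholder database name
        table: "shop.orders,shop.customers",   // database.table, comma-separated
        // Tables separated by ";", fields by ","; unspecified tables use their primary key.
        keyColumns: "shop.orders:order_id;shop.customers:customer_id",
        includeContentChanges: "dml",          // all | dml
        outputFormat: "DEFAULT",               // DEFAULT | CANAL_1 | CANAL_2
        snapshotMode: "initial",               // schema_only | initial
        recordWithSchema: false,
    };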

    CkafkaDatahubTaskSourceResourceMongoDbParam, CkafkaDatahubTaskSourceResourceMongoDbParamArgs

    Collection string
    MongoDB collection.
    CopyExisting bool
    Whether to copy existing (stock) data; defaults to true.
    Database string
    MongoDB database name.
    Resource string
    Resource ID.
    Ip string
    MongoDB connection IP.
    ListeningEvent string
    Listening event types; if empty, all are selected. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, and rename; separate multiple types with commas.
    Password string
    MongoDB database password.
    Pipeline string
    Aggregation pipeline.
    Port double
    MongoDB connection port.
    ReadPreference string
    Primary/secondary read preference; defaults to the primary node.
    SelfBuilt bool
    Whether it is a self-built cluster.
    UserName string
    MongoDB database username.
    Collection string
    MongoDB collection.
    CopyExisting bool
    Whether to copy existing (stock) data; defaults to true.
    Database string
    MongoDB database name.
    Resource string
    Resource ID.
    Ip string
    MongoDB connection IP.
    ListeningEvent string
    Listening event types; if empty, all are selected. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, and rename; separate multiple types with commas.
    Password string
    MongoDB database password.
    Pipeline string
    Aggregation pipeline.
    Port float64
    MongoDB connection port.
    ReadPreference string
    Primary/secondary read preference; defaults to the primary node.
    SelfBuilt bool
    Whether it is a self-built cluster.
    UserName string
    MongoDB database username.
    collection String
    MongoDB collection.
    copyExisting Boolean
    Whether to copy existing (stock) data; defaults to true.
    database String
    MongoDB database name.
    resource String
    Resource ID.
    ip String
    MongoDB connection IP.
    listeningEvent String
    Listening event types; if empty, all are selected. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, and rename; separate multiple types with commas.
    password String
    MongoDB database password.
    pipeline String
    Aggregation pipeline.
    port Double
    MongoDB connection port.
    readPreference String
    Primary/secondary read preference; defaults to the primary node.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    userName String
    MongoDB database username.
    collection string
    MongoDB collection.
    copyExisting boolean
    Whether to copy existing (stock) data; defaults to true.
    database string
    MongoDB database name.
    resource string
    Resource ID.
    ip string
    MongoDB connection IP.
    listeningEvent string
    Listening event types; if empty, all are selected. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, and rename; separate multiple types with commas.
    password string
    MongoDB database password.
    pipeline string
    Aggregation pipeline.
    port number
    MongoDB connection port.
    readPreference string
    Primary/secondary read preference; defaults to the primary node.
    selfBuilt boolean
    Whether it is a self-built cluster.
    userName string
    MongoDB database username.
    collection str
    MongoDB collection.
    copy_existing bool
    Whether to copy existing (stock) data; defaults to true.
    database str
    MongoDB database name.
    resource str
    Resource ID.
    ip str
    MongoDB connection IP.
    listening_event str
    Listening event types; if empty, all are selected. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, and rename; separate multiple types with commas.
    password str
    MongoDB database password.
    pipeline str
    Aggregation pipeline.
    port float
    MongoDB connection port.
    read_preference str
    Primary/secondary read preference; defaults to the primary node.
    self_built bool
    Whether it is a self-built cluster.
    user_name str
    MongoDB database username.
    collection String
    MongoDB collection.
    copyExisting Boolean
    Whether to copy existing (stock) data; defaults to true.
    database String
    MongoDB database name.
    resource String
    Resource ID.
    ip String
    MongoDB connection IP.
    listeningEvent String
    Listening event types; if empty, all are selected. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, and rename; separate multiple types with commas.
    password String
    MongoDB database password.
    pipeline String
    Aggregation pipeline.
    port Number
    MongoDB connection port.
    readPreference String
    Primary/secondary read preference; defaults to the primary node.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    userName String
    MongoDB database username.
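
    A TypeScript sketch of a mongoDbParam block follows; the host, port, credentials, and resource ID are placeholders, and the readPreference value is an assumption about the accepted format.

    // Hypothetical sketch of a mongoDbParam block.
    const mongoDbParam = {
        resource: "resource-xxxxxxxx",           // placeholder resource ID
        database: "appdb",                       // placeholder database name
        collection: "events",                    // placeholder collection
        ip: "10.0.0.10",                         // placeholder connection IP
        port: 27017,                             // MongoDB connection port
        userName: "mongo-user",                  // placeholder username
        password: "your-password",               // placeholder credential
        listeningEvent: "insert,update,delete",  // comma-separated event types
        copyExisting: true,                      // copy stock data first (default true)
        selfBuilt: true,                         // self-built cluster
        readPreference: "primary",               // assumed value; defaults to the primary node
    };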

    CkafkaDatahubTaskSourceResourceMySqlParam, CkafkaDatahubTaskSourceResourceMySqlParamArgs

    Database string
    MySQL database name, * is the whole database.
    Resource string
    MySQL connection Id.
    Table string
    The name of the MySQL data table, is the non-system table in all the monitored databases, which can be separated by, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name, when a regular expression needs to be filled in, the format is data database name.data table name.
    DataSourceIncrementColumn string
    the name of the column to be monitored.
    DataSourceIncrementMode string
    TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
    DataSourceMonitorMode string
    TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
    DataSourceMonitorResource string
    When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
    DataSourceStartFrom string
    HEAD means copy stock + incremental data, TAIL means copy only incremental data.
    DataTargetInsertMode string
    INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
    DataTargetPrimaryKeyField string
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    DataTargetRecordMappings List<CkafkaDatahubTaskSourceResourceMySqlParamDataTargetRecordMapping>
    Mapping relationship between tables and messages.
    DdlTopic string
    The Topic that stores the Ddl information of My SQL, if it is empty, it will not be stored by default.
    DropCls CkafkaDatahubTaskSourceResourceMySqlParamDropCls
    When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
    DropInvalidMessage bool
    Whether to discard messages that fail to parse, the default is true.
    IncludeContentChanges string
    If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
    IncludeQuery bool
    If the value is true, and the value of the binlog rows query log events configuration item in My SQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain Original SQL statement.
    IsTablePrefix bool
    When the Table input is a prefix, the value of this item is true, otherwise it is false.
    IsTableRegular bool
    Whether the input table is a regular expression, if this option and Is Table Prefix are true at the same time, the judgment priority of this option is higher than Is Table Prefix.
    KeyColumns string
    Format library1.table1 field 1,field 2;library 2.table2 field 2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
    OutputFormat string
    output format, DEFAULT, CANAL_1, CANAL_2.
    RecordWithSchema bool
    If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
    SignalDatabase string
    database name of signal table.
    SnapshotMode string
    whether to Copy inventory information (schema_only does not copy, initial full amount), the default is initial.
    TopicRegex string
    Regular expression for routing events to specific topics, defaults to (.*).
    TopicReplacement string
    TopicRegex, $1, $2.
    Database string
    MySQL database name, * is the whole database.
    Resource string
    MySQL connection Id.
    Table string
    The name of the MySQL data table, is the non-system table in all the monitored databases, which can be separated by, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name, when a regular expression needs to be filled in, the format is data database name.data table name.
    DataSourceIncrementColumn string
    the name of the column to be monitored.
    DataSourceIncrementMode string
    TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
    DataSourceMonitorMode string
    TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
    DataSourceMonitorResource string
    When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
    DataSourceStartFrom string
    HEAD means copy stock + incremental data, TAIL means copy only incremental data.
    DataTargetInsertMode string
    INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
    DataTargetPrimaryKeyField string
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    DataTargetRecordMappings []CkafkaDatahubTaskSourceResourceMySqlParamDataTargetRecordMapping
    Mapping relationship between tables and messages.
    DdlTopic string
    The Topic that stores the Ddl information of My SQL, if it is empty, it will not be stored by default.
    DropCls CkafkaDatahubTaskSourceResourceMySqlParamDropCls
    When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
    DropInvalidMessage bool
    Whether to discard messages that fail to parse, the default is true.
    IncludeContentChanges string
    If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
    IncludeQuery bool
    If the value is true, and the value of the binlog rows query log events configuration item in My SQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain Original SQL statement.
    IsTablePrefix bool
    When the Table input is a prefix, the value of this item is true, otherwise it is false.
    IsTableRegular bool
    Whether the input table is a regular expression, if this option and Is Table Prefix are true at the same time, the judgment priority of this option is higher than Is Table Prefix.
    KeyColumns string
    Format library1.table1 field 1,field 2;library 2.table2 field 2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
    OutputFormat string
    output format, DEFAULT, CANAL_1, CANAL_2.
    RecordWithSchema bool
    If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
    SignalDatabase string
    database name of signal table.
    SnapshotMode string
    whether to Copy inventory information (schema_only does not copy, initial full amount), the default is initial.
    TopicRegex string
    Regular expression for routing events to specific topics, defaults to (.*).
    TopicReplacement string
    TopicRegex, $1, $2.
    database String
    MySQL database name, * is the whole database.
    resource String
    MySQL connection Id.
    table String
    The name of the MySQL data table, is the non-system table in all the monitored databases, which can be separated by, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name, when a regular expression needs to be filled in, the format is data database name.data table name.
    dataSourceIncrementColumn String
    the name of the column to be monitored.
    dataSourceIncrementMode String
    TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
    dataSourceMonitorMode String
    TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
    dataSourceMonitorResource String
    When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
    dataSourceStartFrom String
    HEAD means copy stock + incremental data, TAIL means copy only incremental data.
    dataTargetInsertMode String
    INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
    dataTargetPrimaryKeyField String
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    dataTargetRecordMappings List<CkafkaDatahubTaskSourceResourceMySqlParamDataTargetRecordMapping>
    Mapping relationship between tables and messages.
    ddlTopic String
    The Topic that stores the Ddl information of My SQL, if it is empty, it will not be stored by default.
    dropCls CkafkaDatahubTaskSourceResourceMySqlParamDropCls
    When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
    dropInvalidMessage Boolean
    Whether to discard messages that fail to parse, the default is true.
    includeContentChanges String
    If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
    includeQuery Boolean
    If the value is true, and the value of the binlog rows query log events configuration item in My SQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain Original SQL statement.
    isTablePrefix Boolean
    When the Table input is a prefix, the value of this item is true, otherwise it is false.
    isTableRegular Boolean
    Whether the input table is a regular expression, if this option and Is Table Prefix are true at the same time, the judgment priority of this option is higher than Is Table Prefix.
    keyColumns String
    Format library1.table1 field 1,field 2;library 2.table2 field 2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
    outputFormat String
    output format, DEFAULT, CANAL_1, CANAL_2.
    recordWithSchema Boolean
    If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
    signalDatabase String
    database name of signal table.
    snapshotMode String
    whether to Copy inventory information (schema_only does not copy, initial full amount), the default is initial.
    topicRegex String
    Regular expression for routing events to specific topics, defaults to (.*).
    topicReplacement String
    TopicRegex, $1, $2.
    database string
    MySQL database name; * means the whole database.
    resource string
    MySQL connection Id.
    table string
    The name of the MySQL data table; * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with commas (,); each table must be given in the format database name.table name, and the same format applies when a regular expression is used.
    dataSourceIncrementColumn string
    The name of the column to be monitored.
    dataSourceIncrementMode string
    TIMESTAMP indicates that the incremental column is of timestamp type; INCREMENT indicates that the incremental column is of auto-increment id type.
    dataSourceMonitorMode string
    TABLE indicates that the read item is a table; QUERY indicates that the read item is a query.
    dataSourceMonitorResource string
    When DataMonitorMode=TABLE, pass in the table that needs to be read; when DataMonitorMode=QUERY, pass in the query SQL statement that needs to be read.
    dataSourceStartFrom string
    HEAD means copy both existing and incremental data; TAIL means copy only incremental data.
    dataTargetInsertMode string
    INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
    dataTargetPrimaryKeyField string
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    dataTargetRecordMappings CkafkaDatahubTaskSourceResourceMySqlParamDataTargetRecordMapping[]
    Mapping relationship between tables and messages.
    ddlTopic string
    The topic that stores the DDL information of MySQL; if empty, it is not stored by default.
    dropCls CkafkaDatahubTaskSourceResourceMySqlParamDropCls
    When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is ignored.
    dropInvalidMessage boolean
    Whether to discard messages that fail to parse; defaults to true.
    includeContentChanges string
    If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written to the selected topic.
    includeQuery boolean
    If the value is true and the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
    isTablePrefix boolean
    Set to true when the Table input is a prefix; otherwise false.
    isTableRegular boolean
    Whether the input table is a regular expression. If both this option and Is Table Prefix are true, this option takes precedence.
    keyColumns string
    Format: library1.table1 field1,field2;library2.table2 field2. Tables are separated by semicolons (;) and fields by commas (,). Tables that are not specified default to the table's primary key.
    outputFormat string
    Output format. Valid values: DEFAULT, CANAL_1, CANAL_2.
    recordWithSchema boolean
    If the value is true, the message carries the schema corresponding to the message structure; if the value is false, it does not.
    signalDatabase string
    Database name of the signal table.
    snapshotMode string
    Whether to copy existing data (schema_only copies nothing; initial copies the full amount); defaults to initial.
    topicRegex string
    Regular expression for routing events to specific topics; defaults to (.*).
    topicReplacement string
    Replacement template applied to TopicRegex matches; capture groups such as $1 and $2 may be referenced.
    database str
    MySQL database name; * means the whole database.
    resource str
    MySQL connection Id.
    table str
    The name of the MySQL data table; * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with commas (,); each table must be given in the format database name.table name, and the same format applies when a regular expression is used.
    data_source_increment_column str
    The name of the column to be monitored.
    data_source_increment_mode str
    TIMESTAMP indicates that the incremental column is of timestamp type; INCREMENT indicates that the incremental column is of auto-increment id type.
    data_source_monitor_mode str
    TABLE indicates that the read item is a table; QUERY indicates that the read item is a query.
    data_source_monitor_resource str
    When DataMonitorMode=TABLE, pass in the table that needs to be read; when DataMonitorMode=QUERY, pass in the query SQL statement that needs to be read.
    data_source_start_from str
    HEAD means copy both existing and incremental data; TAIL means copy only incremental data.
    data_target_insert_mode str
    INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
    data_target_primary_key_field str
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    data_target_record_mappings Sequence[CkafkaDatahubTaskSourceResourceMySqlParamDataTargetRecordMapping]
    Mapping relationship between tables and messages.
    ddl_topic str
    The topic that stores the DDL information of MySQL; if empty, it is not stored by default.
    drop_cls CkafkaDatahubTaskSourceResourceMySqlParamDropCls
    When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is ignored.
    drop_invalid_message bool
    Whether to discard messages that fail to parse; defaults to true.
    include_content_changes str
    If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written to the selected topic.
    include_query bool
    If the value is true and the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
    is_table_prefix bool
    Set to true when the Table input is a prefix; otherwise false.
    is_table_regular bool
    Whether the input table is a regular expression. If both this option and Is Table Prefix are true, this option takes precedence.
    key_columns str
    Format: library1.table1 field1,field2;library2.table2 field2. Tables are separated by semicolons (;) and fields by commas (,). Tables that are not specified default to the table's primary key.
    output_format str
    Output format. Valid values: DEFAULT, CANAL_1, CANAL_2.
    record_with_schema bool
    If the value is true, the message carries the schema corresponding to the message structure; if the value is false, it does not.
    signal_database str
    Database name of the signal table.
    snapshot_mode str
    Whether to copy existing data (schema_only copies nothing; initial copies the full amount); defaults to initial.
    topic_regex str
    Regular expression for routing events to specific topics; defaults to (.*).
    topic_replacement str
    Replacement template applied to TopicRegex matches; capture groups such as $1 and $2 may be referenced.
    database String
    MySQL database name; * means the whole database.
    resource String
    MySQL connection Id.
    table String
    The name of the MySQL data table; * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with commas (,); each table must be given in the format database name.table name, and the same format applies when a regular expression is used.
    dataSourceIncrementColumn String
    The name of the column to be monitored.
    dataSourceIncrementMode String
    TIMESTAMP indicates that the incremental column is of timestamp type; INCREMENT indicates that the incremental column is of auto-increment id type.
    dataSourceMonitorMode String
    TABLE indicates that the read item is a table; QUERY indicates that the read item is a query.
    dataSourceMonitorResource String
    When DataMonitorMode=TABLE, pass in the table that needs to be read; when DataMonitorMode=QUERY, pass in the query SQL statement that needs to be read.
    dataSourceStartFrom String
    HEAD means copy both existing and incremental data; TAIL means copy only incremental data.
    dataTargetInsertMode String
    INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
    dataTargetPrimaryKeyField String
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    dataTargetRecordMappings List<Property Map>
    Mapping relationship between tables and messages.
    ddlTopic String
    The topic that stores the DDL information of MySQL; if empty, it is not stored by default.
    dropCls Property Map
    When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is ignored.
    dropInvalidMessage Boolean
    Whether to discard messages that fail to parse; defaults to true.
    includeContentChanges String
    If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written to the selected topic.
    includeQuery Boolean
    If the value is true and the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
    isTablePrefix Boolean
    Set to true when the Table input is a prefix; otherwise false.
    isTableRegular Boolean
    Whether the input table is a regular expression. If both this option and Is Table Prefix are true, this option takes precedence.
    keyColumns String
    Format: library1.table1 field1,field2;library2.table2 field2. Tables are separated by semicolons (;) and fields by commas (,). Tables that are not specified default to the table's primary key.
    outputFormat String
    Output format. Valid values: DEFAULT, CANAL_1, CANAL_2.
    recordWithSchema Boolean
    If the value is true, the message carries the schema corresponding to the message structure; if the value is false, it does not.
    signalDatabase String
    Database name of the signal table.
    snapshotMode String
    Whether to copy existing data (schema_only copies nothing; initial copies the full amount); defaults to initial.
    topicRegex String
    Regular expression for routing events to specific topics; defaults to (.*).
    topicReplacement String
    Replacement template applied to TopicRegex matches; capture groups such as $1 and $2 may be referenced.
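
    To see how these fields fit together, here is a minimal sketch of a MySQL source task in TypeScript, in the same shape as the example at the top of this page. The connection Id, database, table, and topic names are placeholders, and the field choices are illustrative rather than canonical:

    import * as tencentcloud from "@pulumi/tencentcloud";

    // Sketch: stream MySQL changes into a CKafka topic.
    const mysqlTask = new tencentcloud.CkafkaDatahubTask("mysqlTask", {
        taskName: "mysql-to-topic",
        taskType: "SOURCE",
        sourceResource: {
            type: "MYSQL",
            mySqlParam: {
                resource: "resource-xxxxxxxx",   // MySQL connection Id (placeholder)
                database: "test_db",             // * would monitor the whole database
                table: "test_db.users",          // database name.table name format
                snapshotMode: "initial",         // copy existing rows, then stream changes
                keyColumns: "test_db.users id",  // message key column for this table
                dropInvalidMessage: true,        // discard unparseable messages
            },
        },
        targetResource: {
            type: "TOPIC",
            topicParam: {
                resource: "1308726196-example-topic", // placeholder topic
                useAutoCreateTopic: false,
            },
        },
    });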

    CkafkaDatahubTaskSourceResourceMySqlParamDataTargetRecordMapping, CkafkaDatahubTaskSourceResourceMySqlParamDataTargetRecordMappingArgs

    AllowNull bool
    Whether the message is allowed to be empty.
    AutoIncrement bool
    Whether it is an auto-increment column.
    ColumnName string
    Column name.
    ColumnSize string
    Current column size.
    DecimalDigits string
    Current column decimal digits.
    DefaultValue string
    Database table default parameters.
    ExtraInfo string
    Database table extra fields.
    JsonKey string
    The key name of the message.
    Type string
    Message type.
    AllowNull bool
    Whether the message is allowed to be empty.
    AutoIncrement bool
    Whether it is an auto-increment column.
    ColumnName string
    Column name.
    ColumnSize string
    Current column size.
    DecimalDigits string
    Current column decimal digits.
    DefaultValue string
    Database table default parameters.
    ExtraInfo string
    Database table extra fields.
    JsonKey string
    The key name of the message.
    Type string
    Message type.
    allowNull Boolean
    Whether the message is allowed to be empty.
    autoIncrement Boolean
    Whether it is an auto-increment column.
    columnName String
    Column name.
    columnSize String
    Current column size.
    decimalDigits String
    Current column decimal digits.
    defaultValue String
    Database table default parameters.
    extraInfo String
    Database table extra fields.
    jsonKey String
    The key name of the message.
    type String
    Message type.
    allowNull boolean
    Whether the message is allowed to be empty.
    autoIncrement boolean
    Whether it is an auto-increment column.
    columnName string
    Column name.
    columnSize string
    Current column size.
    decimalDigits string
    Current column decimal digits.
    defaultValue string
    Database table default parameters.
    extraInfo string
    Database table extra fields.
    jsonKey string
    The key name of the message.
    type string
    Message type.
    allow_null bool
    Whether the message is allowed to be empty.
    auto_increment bool
    Whether it is an auto-increment column.
    column_name str
    Column name.
    column_size str
    Current column size.
    decimal_digits str
    Current column decimal digits.
    default_value str
    Database table default parameters.
    extra_info str
    Database table extra fields.
    json_key str
    The key name of the message.
    type str
    Message type.
    allowNull Boolean
    Whether the message is allowed to be empty.
    autoIncrement Boolean
    Whether it is an auto-increment column.
    columnName String
    Column name.
    columnSize String
    Current column size.
    decimalDigits String
    Current column decimal digits.
    defaultValue String
    Database table default parameters.
    extraInfo String
    Database table extra fields.
    jsonKey String
    The key name of the message.
    type String
    Message type.
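
    When a task writes into a relational target, the record mappings above describe how message keys land in table columns. A hedged sketch of two entries (the column names, keys, and types are invented for illustration):

    // Hypothetical column-to-message-key mappings for an "orders" table;
    // these would be passed as the dataTargetRecordMappings array.
    const recordMappings = [
        {
            columnName: "id",        // target table column
            jsonKey: "order_id",     // key name in the message
            type: "int",             // message type
            allowNull: false,
            autoIncrement: true,
        },
        {
            columnName: "amount",
            jsonKey: "amount",
            type: "float",
            allowNull: true,
            autoIncrement: false,
        },
    ];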

    CkafkaDatahubTaskSourceResourceMySqlParamDropCls, CkafkaDatahubTaskSourceResourceMySqlParamDropClsArgs

    DropClsLogSet string
    cls LogSet id.
    DropClsOwneruin string
    Account.
    DropClsRegion string
    The region where the cls is delivered.
    DropClsTopicId string
    cls topic id.
    DropInvalidMessageToCls bool
    Whether to deliver to cls.
    DropClsLogSet string
    cls LogSet id.
    DropClsOwneruin string
    Account.
    DropClsRegion string
    The region where the cls is delivered.
    DropClsTopicId string
    cls topic id.
    DropInvalidMessageToCls bool
    Whether to deliver to cls.
    dropClsLogSet String
    cls LogSet id.
    dropClsOwneruin String
    Account.
    dropClsRegion String
    The region where the cls is delivered.
    dropClsTopicId String
    cls topic id.
    dropInvalidMessageToCls Boolean
    Whether to deliver to cls.
    dropClsLogSet string
    cls LogSet id.
    dropClsOwneruin string
    Account.
    dropClsRegion string
    The region where the cls is delivered.
    dropClsTopicId string
    cls topic id.
    dropInvalidMessageToCls boolean
    Whether to deliver to cls.
    drop_cls_log_set str
    cls LogSet id.
    drop_cls_owneruin str
    Account.
    drop_cls_region str
    The region where the cls is delivered.
    drop_cls_topic_id str
    cls topic id.
    drop_invalid_message_to_cls bool
    Whether to deliver to cls.
    dropClsLogSet String
    cls LogSet id.
    dropClsOwneruin String
    Account.
    dropClsRegion String
    The region where the cls is delivered.
    dropClsTopicId String
    cls topic id.
    dropInvalidMessageToCls Boolean
    Whether to deliver to cls.
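
    dropCls controls where unparseable messages are delivered when they are not simply discarded. A sketch, assuming an existing CLS logset and topic (all ids below are placeholders):

    // Deliver messages that fail to parse to CLS instead of dropping them.
    // With dropInvalidMessageToCls set to true, the dropInvalidMessage
    // setting on the parent param block is ignored.
    const dropCls = {
        dropInvalidMessageToCls: true,
        dropClsRegion: "ap-guangzhou",    // region of the CLS service (placeholder)
        dropClsOwneruin: "100000000001",  // account uin (placeholder)
        dropClsLogSet: "logset-xxxxxxxx", // CLS logset id (placeholder)
        dropClsTopicId: "topic-xxxxxxxx", // CLS topic id (placeholder)
    };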

    CkafkaDatahubTaskSourceResourcePostgreSqlParam, CkafkaDatahubTaskSourceResourcePostgreSqlParamArgs

    Database string
    PostgreSQL database name.
    PluginName string
    Logical decoding plugin (decoderbufs or pgoutput); defaults to decoderbufs.
    Resource string
    PostgreSQL connection Id.
    Table string
    PostgreSQL table name; * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with commas (,); each table must be given in the format schema name.table name, and the same format applies when a regular expression is used.
    DataFormat string
    Upstream data format (JSON or Debezium); required when the database synchronization mode matches the default field.
    DataTargetInsertMode string
    INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
    DataTargetPrimaryKeyField string
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    DataTargetRecordMappings List<CkafkaDatahubTaskSourceResourcePostgreSqlParamDataTargetRecordMapping>
    Mapping relationship between tables and messages.
    DropInvalidMessage bool
    Whether to discard messages that fail to parse; defaults to true.
    IsTableRegular bool
    Whether the input table is a regular expression.
    KeyColumns string
    Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by semicolons (;) and fields by commas (,). Tables that are not specified default to the table's primary key.
    RecordWithSchema bool
    If the value is true, the message carries the schema corresponding to the message structure; if the value is false, it does not.
    SnapshotMode string
    Snapshot mode: never or initial; defaults to initial.
    Database string
    PostgreSQL database name.
    PluginName string
    Logical decoding plugin (decoderbufs or pgoutput); defaults to decoderbufs.
    Resource string
    PostgreSQL connection Id.
    Table string
    PostgreSQL table name; * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with commas (,); each table must be given in the format schema name.table name, and the same format applies when a regular expression is used.
    DataFormat string
    Upstream data format (JSON or Debezium); required when the database synchronization mode matches the default field.
    DataTargetInsertMode string
    INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
    DataTargetPrimaryKeyField string
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    DataTargetRecordMappings []CkafkaDatahubTaskSourceResourcePostgreSqlParamDataTargetRecordMapping
    Mapping relationship between tables and messages.
    DropInvalidMessage bool
    Whether to discard messages that fail to parse; defaults to true.
    IsTableRegular bool
    Whether the input table is a regular expression.
    KeyColumns string
    Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by semicolons (;) and fields by commas (,). Tables that are not specified default to the table's primary key.
    RecordWithSchema bool
    If the value is true, the message carries the schema corresponding to the message structure; if the value is false, it does not.
    SnapshotMode string
    Snapshot mode: never or initial; defaults to initial.
    database String
    PostgreSQL database name.
    pluginName String
    Logical decoding plugin (decoderbufs or pgoutput); defaults to decoderbufs.
    resource String
    PostgreSQL connection Id.
    table String
    PostgreSQL table name; * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with commas (,); each table must be given in the format schema name.table name, and the same format applies when a regular expression is used.
    dataFormat String
    Upstream data format (JSON or Debezium); required when the database synchronization mode matches the default field.
    dataTargetInsertMode String
    INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
    dataTargetPrimaryKeyField String
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    dataTargetRecordMappings List<CkafkaDatahubTaskSourceResourcePostgreSqlParamDataTargetRecordMapping>
    Mapping relationship between tables and messages.
    dropInvalidMessage Boolean
    Whether to discard messages that fail to parse; defaults to true.
    isTableRegular Boolean
    Whether the input table is a regular expression.
    keyColumns String
    Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by semicolons (;) and fields by commas (,). Tables that are not specified default to the table's primary key.
    recordWithSchema Boolean
    If the value is true, the message carries the schema corresponding to the message structure; if the value is false, it does not.
    snapshotMode String
    Snapshot mode: never or initial; defaults to initial.
    database string
    PostgreSQL database name.
    pluginName string
    Logical decoding plugin (decoderbufs or pgoutput); defaults to decoderbufs.
    resource string
    PostgreSQL connection Id.
    table string
    PostgreSQL table name; * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with commas (,); each table must be given in the format schema name.table name, and the same format applies when a regular expression is used.
    dataFormat string
    Upstream data format (JSON or Debezium); required when the database synchronization mode matches the default field.
    dataTargetInsertMode string
    INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
    dataTargetPrimaryKeyField string
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    dataTargetRecordMappings CkafkaDatahubTaskSourceResourcePostgreSqlParamDataTargetRecordMapping[]
    Mapping relationship between tables and messages.
    dropInvalidMessage boolean
    Whether to discard messages that fail to parse; defaults to true.
    isTableRegular boolean
    Whether the input table is a regular expression.
    keyColumns string
    Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by semicolons (;) and fields by commas (,). Tables that are not specified default to the table's primary key.
    recordWithSchema boolean
    If the value is true, the message carries the schema corresponding to the message structure; if the value is false, it does not.
    snapshotMode string
    Snapshot mode: never or initial; defaults to initial.
    database str
    PostgreSQL database name.
    plugin_name str
    Logical decoding plugin (decoderbufs or pgoutput); defaults to decoderbufs.
    resource str
    PostgreSQL connection Id.
    table str
    PostgreSQL table name; * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with commas (,); each table must be given in the format schema name.table name, and the same format applies when a regular expression is used.
    data_format str
    Upstream data format (JSON or Debezium); required when the database synchronization mode matches the default field.
    data_target_insert_mode str
    INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
    data_target_primary_key_field str
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    data_target_record_mappings Sequence[CkafkaDatahubTaskSourceResourcePostgreSqlParamDataTargetRecordMapping]
    Mapping relationship between tables and messages.
    drop_invalid_message bool
    Whether to discard messages that fail to parse; defaults to true.
    is_table_regular bool
    Whether the input table is a regular expression.
    key_columns str
    Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by semicolons (;) and fields by commas (,). Tables that are not specified default to the table's primary key.
    record_with_schema bool
    If the value is true, the message carries the schema corresponding to the message structure; if the value is false, it does not.
    snapshot_mode str
    Snapshot mode: never or initial; defaults to initial.
    database String
    PostgreSQL database name.
    pluginName String
    Logical decoding plugin (decoderbufs or pgoutput); defaults to decoderbufs.
    resource String
    PostgreSQL connection Id.
    table String
    PostgreSQL table name; * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with commas (,); each table must be given in the format schema name.table name, and the same format applies when a regular expression is used.
    dataFormat String
    Upstream data format (JSON or Debezium); required when the database synchronization mode matches the default field.
    dataTargetInsertMode String
    INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
    dataTargetPrimaryKeyField String
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    dataTargetRecordMappings List<Property Map>
    Mapping relationship between tables and messages.
    dropInvalidMessage Boolean
    Whether to discard messages that fail to parse; defaults to true.
    isTableRegular Boolean
    Whether the input table is a regular expression.
    keyColumns String
    Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by semicolons (;) and fields by commas (,). Tables that are not specified default to the table's primary key.
    recordWithSchema Boolean
    If the value is true, the message carries the schema corresponding to the message structure; if the value is false, it does not.
    snapshotMode String
    Snapshot mode: never or initial; defaults to initial.
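
    Beyond the minimal PostgreSQL source shown at the top of this page, the param block also carries plugin, regex, and key-column settings. A hedged sketch of a fuller postgreSqlParam (the connection Id, schema, and table pattern are placeholders):

    // Hypothetical PostgreSQL source param using a table regex.
    const postgreSqlParam = {
        resource: "resource-xxxxxxxx",    // PostgreSQL connection Id (placeholder)
        database: "orders_db",
        table: "public.orders_\\d+",      // schema name.table name, here as a regex
        isTableRegular: true,             // treat the table field as a regular expression
        pluginName: "pgoutput",           // decoderbufs or pgoutput
        snapshotMode: "initial",          // copy existing rows before streaming changes
        keyColumns: "public.orders_1:id", // schema.table:field; ; between tables, , between fields
        recordWithSchema: false,
    };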

    CkafkaDatahubTaskSourceResourcePostgreSqlParamDataTargetRecordMapping, CkafkaDatahubTaskSourceResourcePostgreSqlParamDataTargetRecordMappingArgs

    AllowNull bool
    Whether the message is allowed to be empty.
    AutoIncrement bool
    Whether it is an auto-increment column.
    ColumnName string
    Column name.
    ColumnSize string
    Current column size.
    DecimalDigits string
    Current column decimal digits.
    DefaultValue string
    Database table default parameters.
    ExtraInfo string
    Database table extra fields.
    JsonKey string
    The key name of the message.
    Type string
    Message type.
    AllowNull bool
    Whether the message is allowed to be empty.
    AutoIncrement bool
    Whether it is an auto-increment column.
    ColumnName string
    Column name.
    ColumnSize string
    Current column size.
    DecimalDigits string
    Current column decimal digits.
    DefaultValue string
    Database table default parameters.
    ExtraInfo string
    Database table extra fields.
    JsonKey string
    The key name of the message.
    Type string
    Message type.
    allowNull Boolean
    Whether the message is allowed to be empty.
    autoIncrement Boolean
    Whether it is an auto-increment column.
    columnName String
    Column name.
    columnSize String
    Current column size.
    decimalDigits String
    Current column decimal digits.
    defaultValue String
    Database table default parameters.
    extraInfo String
    Database table extra fields.
    jsonKey String
    The key name of the message.
    type String
    Message type.
    allowNull boolean
    Whether the message is allowed to be empty.
    autoIncrement boolean
    Whether it is an auto-increment column.
    columnName string
    Column name.
    columnSize string
    Current column size.
    decimalDigits string
    Current column decimal digits.
    defaultValue string
    Database table default parameters.
    extraInfo string
    Database table extra fields.
    jsonKey string
    The key name of the message.
    type string
    Message type.
    allow_null bool
    Whether the message is allowed to be empty.
    auto_increment bool
    Whether it is an auto-increment column.
    column_name str
    Column name.
    column_size str
    Current column size.
    decimal_digits str
    Current column decimal digits.
    default_value str
    Database table default parameters.
    extra_info str
    Database table extra fields.
    json_key str
    The key name of the message.
    type str
    Message type.
    allowNull Boolean
    Whether the message is allowed to be empty.
    autoIncrement Boolean
    Whether it is an auto-increment column.
    columnName String
    Column name.
    columnSize String
    Current column size.
    decimalDigits String
    Current column decimal digits.
    defaultValue String
    Database table default parameters.
    extraInfo String
    Database table extra fields.
    jsonKey String
    The key name of the message.
    type String
    Message type.

    CkafkaDatahubTaskSourceResourceScfParam, CkafkaDatahubTaskSourceResourceScfParamArgs

    FunctionName string
    SCF function name.
    BatchSize double
    The maximum number of messages sent in each batch; defaults to 1000.
    MaxRetries double
    The number of retries after a failed SCF call; defaults to 5.
    Namespace string
    SCF cloud function namespace; defaults to default.
    Qualifier string
    SCF cloud function version or alias; defaults to DEFAULT.
    FunctionName string
    SCF function name.
    BatchSize float64
    The maximum number of messages sent in each batch; defaults to 1000.
    MaxRetries float64
    The number of retries after a failed SCF call; defaults to 5.
    Namespace string
    SCF cloud function namespace; defaults to default.
    Qualifier string
    SCF cloud function version or alias; defaults to DEFAULT.
    functionName String
    SCF function name.
    batchSize Double
    The maximum number of messages sent in each batch; defaults to 1000.
    maxRetries Double
    The number of retries after a failed SCF call; defaults to 5.
    namespace String
    SCF cloud function namespace; defaults to default.
    qualifier String
    SCF cloud function version or alias; defaults to DEFAULT.
    functionName string
    SCF function name.
    batchSize number
    The maximum number of messages sent in each batch; defaults to 1000.
    maxRetries number
    The number of retries after a failed SCF call; defaults to 5.
    namespace string
    SCF cloud function namespace; defaults to default.
    qualifier string
    SCF cloud function version or alias; defaults to DEFAULT.
    function_name str
    SCF function name.
    batch_size float
    The maximum number of messages sent in each batch; defaults to 1000.
    max_retries float
    The number of retries after a failed SCF call; defaults to 5.
    namespace str
    SCF cloud function namespace; defaults to default.
    qualifier str
    SCF cloud function version or alias; defaults to DEFAULT.
    functionName String
    SCF function name.
    batchSize Number
    The maximum number of messages sent in each batch; defaults to 1000.
    maxRetries Number
    The number of retries after a failed SCF call; defaults to 5.
    namespace String
    SCF cloud function namespace; defaults to default.
    qualifier String
    SCF cloud function version or alias; defaults to DEFAULT.
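
    An SCF target only needs the function's identity plus batching and retry tuning. A sketch of a task delivering a topic into SCF; the function and topic names are placeholders, and taskType "SINK" is an assumption for outflow tasks:

    import * as tencentcloud from "@pulumi/tencentcloud";

    // Sketch: deliver messages from a CKafka topic to an SCF function.
    const scfTask = new tencentcloud.CkafkaDatahubTask("scfTask", {
        taskName: "topic-to-scf",
        taskType: "SINK",                             // assumed value for an outflow task
        sourceResource: {
            type: "TOPIC",
            topicParam: { resource: "1308726196-example-topic" }, // placeholder topic
        },
        targetResource: {
            type: "SCF",
            scfParam: {
                functionName: "consume-kafka-events", // placeholder function name
                namespace: "default",
                qualifier: "DEFAULT",
                batchSize: 1000,                      // max messages per batch
                maxRetries: 5,                        // retries after a failed call
            },
        },
    });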

    CkafkaDatahubTaskSourceResourceSqlServerParam, CkafkaDatahubTaskSourceResourceSqlServerParamArgs

    Database string
    SQLServer database name.
    Resource string
    SQLServer connection Id.
    Table string
    SQLServer table; * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with commas (,), and each table must be given in the format database name.table name.
    SnapshotMode string
    Snapshot mode: schema_only or initial; defaults to initial.
    Database string
    SQLServer database name.
    Resource string
    SQLServer connection Id.
    Table string
    SQLServer table; * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with commas (,), and each table must be given in the format database name.table name.
    SnapshotMode string
    Snapshot mode: schema_only or initial; defaults to initial.
    database String
    SQLServer database name.
    resource String
    SQLServer connection Id.
    table String
    SQLServer table; * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with commas (,), and each table must be given in the format database name.table name.
    snapshotMode String
    Snapshot mode: schema_only or initial; defaults to initial.
    database string
    SQLServer database name.
    resource string
    SQLServer connection Id.
    table string
    SQLServer table; * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with commas (,), and each table must be given in the format database name.table name.
    snapshotMode string
    Snapshot mode: schema_only or initial; defaults to initial.
    database str
    SQLServer database name.
    resource str
    SQLServer connection Id.
    table str
    SQLServer table; * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with commas (,), and each table must be given in the format database name.table name.
    snapshot_mode str
    Snapshot mode: schema_only or initial; defaults to initial.
    database String
    SQLServer database name.
    resource String
    SQLServer connection Id.
    table String
    SQLServer table; * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with commas (,), and each table must be given in the format database name.table name.
    snapshotMode String
    Snapshot mode: schema_only or initial; defaults to initial.
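
    The SQL Server source mirrors the MySQL shape with a smaller surface. A sketch of a sqlServerParam block (the connection Id and names are placeholders):

    // Hypothetical SQL Server source param.
    const sqlServerParam = {
        resource: "resource-xxxxxxxx", // SQLServer connection Id (placeholder)
        database: "inventory",
        table: "inventory.products",   // database name.table name format
        snapshotMode: "schema_only",   // skip existing rows, capture only new changes
    };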

    CkafkaDatahubTaskSourceResourceTdwParam, CkafkaDatahubTaskSourceResourceTdwParamArgs

    Bid string
    Tdw bid.
    Tid string
    Tdw tid.
    IsDomestic bool
    Defaults to true.
    TdwHost string
    TDW address; defaults to tl-tdbank-tdmanager.tencent-distribute.com.
    TdwPort double
    TDW port; defaults to 8099.
    Bid string
    Tdw bid.
    Tid string
    Tdw tid.
    IsDomestic bool
    Defaults to true.
    TdwHost string
    TDW address; defaults to tl-tdbank-tdmanager.tencent-distribute.com.
    TdwPort float64
    TDW port; defaults to 8099.
    bid String
    Tdw bid.
    tid String
    Tdw tid.
    isDomestic Boolean
    Defaults to true.
    tdwHost String
    TDW address; defaults to tl-tdbank-tdmanager.tencent-distribute.com.
    tdwPort Double
    TDW port; defaults to 8099.
    bid string
    Tdw bid.
    tid string
    Tdw tid.
    isDomestic boolean
    Defaults to true.
    tdwHost string
    TDW address; defaults to tl-tdbank-tdmanager.tencent-distribute.com.
    tdwPort number
    TDW port; defaults to 8099.
    bid str
    Tdw bid.
    tid str
    Tdw tid.
    is_domestic bool
    Defaults to true.
    tdw_host str
    TDW address; defaults to tl-tdbank-tdmanager.tencent-distribute.com.
    tdw_port float
    TDW port; defaults to 8099.
    bid String
    Tdw bid.
    tid String
    Tdw tid.
    isDomestic Boolean
    Defaults to true.
    tdwHost String
    TDW address; defaults to tl-tdbank-tdmanager.tencent-distribute.com.
    tdwPort Number
    TDW port; defaults to 8099.
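
    A TDW endpoint is addressed by its bid/tid pair, with the host and port falling back to the documented defaults. A sketch (bid and tid are placeholders):

    // Hypothetical TDW param relying on the documented defaults.
    const tdwParam = {
        bid: "b_example", // placeholder Tdw bid
        tid: "t_example", // placeholder Tdw tid
        isDomestic: true,
        tdwHost: "tl-tdbank-tdmanager.tencent-distribute.com", // documented default
        tdwPort: 8099,    // documented default
    };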

    CkafkaDatahubTaskSourceResourceTopicParam, CkafkaDatahubTaskSourceResourceTopicParamArgs

    Resource string
    The name of the topic sold separately.
    CompressionType string
    Whether to compress when writing to the topic; fill in none if compression is not enabled, or one of gzip, snappy, and lz4 if it is.
    MsgMultiple double
    One source topic message is amplified into Msg Multiple messages and written to the target topic (currently only applicable when ckafka flows into ckafka).
    OffsetType string
    Offset type: earliest for the initial position, latest for the latest position, and timestamp for a point in time.
    StartTime double
    Required when the Offset type is timestamp; pass a Unix timestamp accurate to the second.
    TopicId string
    The TopicId of the topic.
    UseAutoCreateTopic bool
    Whether the topic needs to be automatically created (currently only supported by SOURCE inflow tasks).
    Resource string
    The name of the topic sold separately.
    CompressionType string
    Whether to compress when writing to the topic; fill in none if compression is not enabled, or one of gzip, snappy, and lz4 if it is.
    MsgMultiple float64
    One source topic message is amplified into Msg Multiple messages and written to the target topic (currently only applicable when ckafka flows into ckafka).
    OffsetType string
    Offset type: earliest for the initial position, latest for the latest position, and timestamp for a point in time.
    StartTime float64
    Required when the Offset type is timestamp; pass a Unix timestamp accurate to the second.
    TopicId string
    The TopicId of the topic.
    UseAutoCreateTopic bool
    Whether the topic needs to be automatically created (currently only supported by SOURCE inflow tasks).
    resource String
    The name of the topic sold separately.
    compressionType String
    Whether to compress when writing to the topic; fill in none if compression is not enabled, or one of gzip, snappy, and lz4 if it is.
    msgMultiple Double
    One source topic message is amplified into Msg Multiple messages and written to the target topic (currently only applicable when ckafka flows into ckafka).
    offsetType String
    Offset type: earliest for the initial position, latest for the latest position, and timestamp for a point in time.
    startTime Double
    Required when the Offset type is timestamp; pass a Unix timestamp accurate to the second.
    topicId String
    The TopicId of the topic.
    useAutoCreateTopic Boolean
    Whether the topic needs to be automatically created (currently only supported by SOURCE inflow tasks).
    resource string
    The name of the topic sold separately.
    compressionType string
    Whether to compress when writing to the topic; fill in none if compression is not enabled, or one of gzip, snappy, and lz4 if it is.
    msgMultiple number
    One source topic message is amplified into Msg Multiple messages and written to the target topic (currently only applicable when ckafka flows into ckafka).
    offsetType string
    Offset type: earliest for the initial position, latest for the latest position, and timestamp for a point in time.
    startTime number
    Required when the Offset type is timestamp; pass a Unix timestamp accurate to the second.
    topicId string
    The TopicId of the topic.
    useAutoCreateTopic boolean
    Whether the topic needs to be automatically created (currently only supported by SOURCE inflow tasks).
    resource str
    The name of the topic sold separately.
    compression_type str
    Whether to compress when writing to the topic; fill in none if compression is not enabled, or one of gzip, snappy, and lz4 if it is.
    msg_multiple float
    One source topic message is amplified into Msg Multiple messages and written to the target topic (currently only applicable when ckafka flows into ckafka).
    offset_type str
    Offset type: earliest for the initial position, latest for the latest position, and timestamp for a point in time.
    start_time float
    Required when the Offset type is timestamp; pass a Unix timestamp accurate to the second.
    topic_id str
    The TopicId of the topic.
    use_auto_create_topic bool
    Whether the topic needs to be automatically created (currently only supported by SOURCE inflow tasks).
    resource String
    The name of the topic sold separately.
    compressionType String
    Whether to compress when writing to the topic; fill in none if compression is not enabled, or one of gzip, snappy, and lz4 if it is.
    msgMultiple Number
    One source topic message is amplified into Msg Multiple messages and written to the target topic (currently only applicable when ckafka flows into ckafka).
    offsetType String
    Offset type: earliest for the initial position, latest for the latest position, and timestamp for a point in time.
    startTime Number
    Required when the Offset type is timestamp; pass a Unix timestamp accurate to the second.
    topicId String
    The TopicId of the topic.
    useAutoCreateTopic Boolean
    Whether the topic needs to be automatically created (currently only supported by SOURCE inflow tasks).
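
    The offsetType/startTime pairing is the one constraint worth calling out: startTime only matters, and is then required, when offsetType is timestamp. A sketch of a topic source reading from a point in time (the topic name and timestamp are placeholders):

    // Sketch: read a topic starting from a fixed point in time.
    const topicParam = {
        resource: "1308726196-example-topic", // placeholder topic
        offsetType: "timestamp",
        startTime: 1735689600,                // seconds-precision Unix timestamp (placeholder)
        compressionType: "none",              // no compression on write
        useAutoCreateTopic: false,
    };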

    CkafkaDatahubTaskTargetResource, CkafkaDatahubTaskTargetResourceArgs

    Type string
    Resource Type.
    ClickHouseParam CkafkaDatahubTaskTargetResourceClickHouseParam
    ClickHouse configuration, required when Type is CLICKHOUSE.
    ClsParam CkafkaDatahubTaskTargetResourceClsParam
    Cls configuration, required when Type is CLS.
    CosParam CkafkaDatahubTaskTargetResourceCosParam
    Cos configuration, required when Type is COS.
    CtsdbParam CkafkaDatahubTaskTargetResourceCtsdbParam
    Ctsdb configuration, required when Type is CTSDB.
    DtsParam CkafkaDatahubTaskTargetResourceDtsParam
    Dts configuration, required when Type is DTS.
    EsParam CkafkaDatahubTaskTargetResourceEsParam
    Es configuration, required when Type is ES.
    EventBusParam CkafkaDatahubTaskTargetResourceEventBusParam
    EB configuration, required when Type is EB.
    KafkaParam CkafkaDatahubTaskTargetResourceKafkaParam
    CKafka configuration, required when Type is KAFKA.
    MariaDbParam CkafkaDatahubTaskTargetResourceMariaDbParam
    MariaDB configuration, required when Type is MARIADB.
    MongoDbParam CkafkaDatahubTaskTargetResourceMongoDbParam
    MongoDB configuration, required when Type is MONGODB.
    MySqlParam CkafkaDatahubTaskTargetResourceMySqlParam
    MySQL configuration, required when Type is MYSQL.
    PostgreSqlParam CkafkaDatahubTaskTargetResourcePostgreSqlParam
    PostgreSQL configuration, required when Type is POSTGRESQL or TDSQL C_POSTGRESQL.
    ScfParam CkafkaDatahubTaskTargetResourceScfParam
    Scf configuration, required when Type is SCF.
    SqlServerParam CkafkaDatahubTaskTargetResourceSqlServerParam
    SQLServer configuration, required when Type is SQLSERVER.
    TdwParam CkafkaDatahubTaskTargetResourceTdwParam
    Tdw configuration, required when Type is TDW.
    TopicParam CkafkaDatahubTaskTargetResourceTopicParam
    Topic configuration, required when Type is TOPIC.
    Type string
    Resource Type.
    ClickHouseParam CkafkaDatahubTaskTargetResourceClickHouseParam
    ClickHouse configuration, required when Type is CLICKHOUSE.
    ClsParam CkafkaDatahubTaskTargetResourceClsParam
    Cls configuration, required when Type is CLS.
    CosParam CkafkaDatahubTaskTargetResourceCosParam
    Cos configuration, required when Type is COS.
    CtsdbParam CkafkaDatahubTaskTargetResourceCtsdbParam
    Ctsdb configuration, required when Type is CTSDB.
    DtsParam CkafkaDatahubTaskTargetResourceDtsParam
    Dts configuration, required when Type is DTS.
    EsParam CkafkaDatahubTaskTargetResourceEsParam
    Es configuration, required when Type is ES.
    EventBusParam CkafkaDatahubTaskTargetResourceEventBusParam
    EB configuration, required when Type is EB.
    KafkaParam CkafkaDatahubTaskTargetResourceKafkaParam
    CKafka configuration, required when Type is KAFKA.
    MariaDbParam CkafkaDatahubTaskTargetResourceMariaDbParam
    MariaDB configuration, required when Type is MARIADB.
    MongoDbParam CkafkaDatahubTaskTargetResourceMongoDbParam
    MongoDB configuration, required when Type is MONGODB.
    MySqlParam CkafkaDatahubTaskTargetResourceMySqlParam
    MySQL configuration, required when Type is MYSQL.
    PostgreSqlParam CkafkaDatahubTaskTargetResourcePostgreSqlParam
    PostgreSQL configuration, required when Type is POSTGRESQL or TDSQL C_POSTGRESQL.
    ScfParam CkafkaDatahubTaskTargetResourceScfParam
    Scf configuration, required when Type is SCF.
    SqlServerParam CkafkaDatahubTaskTargetResourceSqlServerParam
    SQLServer configuration, required when Type is SQLSERVER.
    TdwParam CkafkaDatahubTaskTargetResourceTdwParam
    Tdw configuration, required when Type is TDW.
    TopicParam CkafkaDatahubTaskTargetResourceTopicParam
    Topic configuration, required when Type is TOPIC.
    type String
    Resource Type.
    clickHouseParam CkafkaDatahubTaskTargetResourceClickHouseParam
    ClickHouse configuration, required when Type is CLICKHOUSE.
    clsParam CkafkaDatahubTaskTargetResourceClsParam
    Cls configuration, required when Type is CLS.
    cosParam CkafkaDatahubTaskTargetResourceCosParam
    Cos configuration, required when Type is COS.
    ctsdbParam CkafkaDatahubTaskTargetResourceCtsdbParam
    Ctsdb configuration, required when Type is CTSDB.
    dtsParam CkafkaDatahubTaskTargetResourceDtsParam
    Dts configuration, required when Type is DTS.
    esParam CkafkaDatahubTaskTargetResourceEsParam
    Es configuration, required when Type is ES.
    eventBusParam CkafkaDatahubTaskTargetResourceEventBusParam
    EB configuration, required when Type is EB.
    kafkaParam CkafkaDatahubTaskTargetResourceKafkaParam
    CKafka configuration, required when Type is KAFKA.
    mariaDbParam CkafkaDatahubTaskTargetResourceMariaDbParam
    MariaDB configuration, required when Type is MARIADB.
    mongoDbParam CkafkaDatahubTaskTargetResourceMongoDbParam
    MongoDB configuration, required when Type is MONGODB.
    mySqlParam CkafkaDatahubTaskTargetResourceMySqlParam
    MySQL configuration, required when Type is MYSQL.
    postgreSqlParam CkafkaDatahubTaskTargetResourcePostgreSqlParam
    PostgreSQL configuration, required when Type is POSTGRESQL or TDSQL C_POSTGRESQL.
    scfParam CkafkaDatahubTaskTargetResourceScfParam
    Scf configuration, required when Type is SCF.
    sqlServerParam CkafkaDatahubTaskTargetResourceSqlServerParam
    SQLServer configuration, required when Type is SQLSERVER.
    tdwParam CkafkaDatahubTaskTargetResourceTdwParam
    Tdw configuration, required when Type is TDW.
    topicParam CkafkaDatahubTaskTargetResourceTopicParam
    Topic configuration, required when Type is TOPIC.
    type string
    Resource Type.
    clickHouseParam CkafkaDatahubTaskTargetResourceClickHouseParam
    ClickHouse configuration, required when Type is CLICKHOUSE.
    clsParam CkafkaDatahubTaskTargetResourceClsParam
    Cls configuration, required when Type is CLS.
    cosParam CkafkaDatahubTaskTargetResourceCosParam
    Cos configuration, required when Type is COS.
    ctsdbParam CkafkaDatahubTaskTargetResourceCtsdbParam
    Ctsdb configuration, required when Type is CTSDB.
    dtsParam CkafkaDatahubTaskTargetResourceDtsParam
    Dts configuration, required when Type is DTS.
    esParam CkafkaDatahubTaskTargetResourceEsParam
    Es configuration, required when Type is ES.
    eventBusParam CkafkaDatahubTaskTargetResourceEventBusParam
    EB configuration, required when Type is EB.
    kafkaParam CkafkaDatahubTaskTargetResourceKafkaParam
    CKafka configuration, required when Type is KAFKA.
    mariaDbParam CkafkaDatahubTaskTargetResourceMariaDbParam
    MariaDB configuration, required when Type is MARIADB.
    mongoDbParam CkafkaDatahubTaskTargetResourceMongoDbParam
    MongoDB configuration, required when Type is MONGODB.
    mySqlParam CkafkaDatahubTaskTargetResourceMySqlParam
    MySQL configuration, required when Type is MYSQL.
    postgreSqlParam CkafkaDatahubTaskTargetResourcePostgreSqlParam
    PostgreSQL configuration, required when Type is POSTGRESQL or TDSQL C_POSTGRESQL.
    scfParam CkafkaDatahubTaskTargetResourceScfParam
    Scf configuration, required when Type is SCF.
    sqlServerParam CkafkaDatahubTaskTargetResourceSqlServerParam
    SQLServer configuration, required when Type is SQLSERVER.
    tdwParam CkafkaDatahubTaskTargetResourceTdwParam
    Tdw configuration, required when Type is TDW.
    topicParam CkafkaDatahubTaskTargetResourceTopicParam
    Topic configuration, required when Type is TOPIC.
    type str
    Resource Type.
    click_house_param CkafkaDatahubTaskTargetResourceClickHouseParam
    ClickHouse configuration, required when Type is CLICKHOUSE.
    cls_param CkafkaDatahubTaskTargetResourceClsParam
    Cls configuration, required when Type is CLS.
    cos_param CkafkaDatahubTaskTargetResourceCosParam
    Cos configuration, required when Type is COS.
    ctsdb_param CkafkaDatahubTaskTargetResourceCtsdbParam
    Ctsdb configuration, required when Type is CTSDB.
    dts_param CkafkaDatahubTaskTargetResourceDtsParam
    Dts configuration, required when Type is DTS.
    es_param CkafkaDatahubTaskTargetResourceEsParam
    Es configuration, required when Type is ES.
    event_bus_param CkafkaDatahubTaskTargetResourceEventBusParam
    EB configuration, required when Type is EB.
    kafka_param CkafkaDatahubTaskTargetResourceKafkaParam
    CKafka configuration, required when Type is KAFKA.
    maria_db_param CkafkaDatahubTaskTargetResourceMariaDbParam
    MariaDB configuration, required when Type is MARIADB.
    mongo_db_param CkafkaDatahubTaskTargetResourceMongoDbParam
    MongoDB configuration, required when Type is MONGODB.
    my_sql_param CkafkaDatahubTaskTargetResourceMySqlParam
    MySQL configuration, required when Type is MYSQL.
    postgre_sql_param CkafkaDatahubTaskTargetResourcePostgreSqlParam
    PostgreSQL configuration, required when Type is POSTGRESQL or TDSQL C_POSTGRESQL.
    scf_param CkafkaDatahubTaskTargetResourceScfParam
    Scf configuration, required when Type is SCF.
    sql_server_param CkafkaDatahubTaskTargetResourceSqlServerParam
    SQLServer configuration, required when Type is SQLSERVER.
    tdw_param CkafkaDatahubTaskTargetResourceTdwParam
    Tdw configuration, required when Type is TDW.
    topic_param CkafkaDatahubTaskTargetResourceTopicParam
    Topic configuration, required when Type is TOPIC.
    type String
    Resource Type.
    clickHouseParam Property Map
    ClickHouse configuration, required when Type is CLICKHOUSE.
    clsParam Property Map
    Cls configuration, required when Type is CLS.
    cosParam Property Map
    Cos configuration, required when Type is COS.
    ctsdbParam Property Map
    Ctsdb configuration, required when Type is CTSDB.
    dtsParam Property Map
    Dts configuration, required when Type is DTS.
    esParam Property Map
    Es configuration, required when Type is ES.
    eventBusParam Property Map
    EB configuration, required when Type is EB.
    kafkaParam Property Map
    CKafka configuration, required when Type is KAFKA.
    mariaDbParam Property Map
    MariaDB configuration, required when Type is MARIADB.
    mongoDbParam Property Map
    MongoDB configuration, required when Type is MONGODB.
    mySqlParam Property Map
    MySQL configuration, required when Type is MYSQL.
    postgreSqlParam Property Map
    PostgreSQL configuration, required when Type is POSTGRESQL or TDSQL C_POSTGRESQL.
    scfParam Property Map
    Scf configuration, required when Type is SCF.
    sqlServerParam Property Map
    SQLServer configuration, required when Type is SQLSERVER.
    tdwParam Property Map
    Tdw configuration, required when Type is TDW.
    topicParam Property Map
    Topic configuration, required when Type is TOPIC.
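
    The pattern throughout targetResource is that type selects exactly one matching param block. For example, a CLS target pairs type "CLS" with clsParam (the CLS resource id is a placeholder; the clsParam fields are documented below):

    // Sketch: a CLS target; type and clsParam must agree.
    const targetResource = {
        type: "CLS",
        clsParam: {
            resource: "cls-xxxxxxxx", // placeholder cls id
            decodeJson: true,         // produced messages are JSON
        },
    };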

    CkafkaDatahubTaskTargetResourceClickHouseParam, CkafkaDatahubTaskTargetResourceClickHouseParamArgs

    Cluster string
    ClickHouse cluster.
    Database string
    ClickHouse database name.
    Resource string
    Resource id.
    Schemas List<CkafkaDatahubTaskTargetResourceClickHouseParamSchema>
    ClickHouse schema.
    Table string
    ClickHouse table.
    DropCls CkafkaDatahubTaskTargetResourceClickHouseParamDropCls
    When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is ignored.
    DropInvalidMessage bool
    Whether ClickHouse discards the message that fails to parse; defaults to true.
    Ip string
    ClickHouse IP.
    Password string
    ClickHouse password.
    Port double
    ClickHouse port.
    SelfBuilt bool
    Whether it is a self-built cluster.
    ServiceVip string
    Instance vip.
    Type string
    Type of the table column.
    UniqVpcId string
    Instance vpc id.
    UserName string
    ClickHouse user name.
    Cluster string
    ClickHouse cluster.
    Database string
    ClickHouse database name.
    Resource string
    Resource id.
    Schemas []CkafkaDatahubTaskTargetResourceClickHouseParamSchema
    ClickHouse schema.
    Table string
    ClickHouse table.
    DropCls CkafkaDatahubTaskTargetResourceClickHouseParamDropCls
    When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is ignored.
    DropInvalidMessage bool
    Whether ClickHouse discards the message that fails to parse; defaults to true.
    Ip string
    ClickHouse IP.
    Password string
    ClickHouse password.
    Port float64
    ClickHouse port.
    SelfBuilt bool
    Whether it is a self-built cluster.
    ServiceVip string
    Instance vip.
    Type string
    Type of the table column.
    UniqVpcId string
    Instance vpc id.
    UserName string
    ClickHouse user name.
    cluster String
    ClickHouse cluster.
    database String
    ClickHouse database name.
    resource String
    Resource id.
    schemas List<CkafkaDatahubTaskTargetResourceClickHouseParamSchema>
    ClickHouse schema.
    table String
    ClickHouse table.
    dropCls CkafkaDatahubTaskTargetResourceClickHouseParamDropCls
    When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is ignored.
    dropInvalidMessage Boolean
    Whether ClickHouse discards the message that fails to parse; defaults to true.
    ip String
    ClickHouse IP.
    password String
    ClickHouse password.
    port Double
    ClickHouse port.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    serviceVip String
    Instance vip.
    type String
    Type of the table column.
    uniqVpcId String
    Instance vpc id.
    userName String
    ClickHouse user name.
    cluster string
    ClickHouse cluster.
    database string
    ClickHouse database name.
    resource string
    Resource id.
    schemas CkafkaDatahubTaskTargetResourceClickHouseParamSchema[]
    ClickHouse schema.
    table string
    ClickHouse table.
    dropCls CkafkaDatahubTaskTargetResourceClickHouseParamDropCls
    When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is ignored.
    dropInvalidMessage boolean
    Whether ClickHouse discards the message that fails to parse; defaults to true.
    ip string
    ClickHouse IP.
    password string
    ClickHouse password.
    port number
    ClickHouse port.
    selfBuilt boolean
    Whether it is a self-built cluster.
    serviceVip string
    Instance vip.
    type string
    Type of the table column.
    uniqVpcId string
    Instance vpc id.
    userName string
    ClickHouse user name.
    cluster str
    ClickHouse cluster.
    database str
    ClickHouse database name.
    resource str
    Resource id.
    schemas Sequence[CkafkaDatahubTaskTargetResourceClickHouseParamSchema]
    ClickHouse schema.
    table str
    ClickHouse table.
    drop_cls CkafkaDatahubTaskTargetResourceClickHouseParamDropCls
    When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is ignored.
    drop_invalid_message bool
    Whether ClickHouse discards the message that fails to parse; defaults to true.
    ip str
    ClickHouse IP.
    password str
    ClickHouse password.
    port float
    ClickHouse port.
    self_built bool
    Whether it is a self-built cluster.
    service_vip str
    Instance vip.
    type str
    Type of the table column.
    uniq_vpc_id str
    Instance vpc id.
    user_name str
    ClickHouse user name.
    cluster String
    ClickHouse cluster.
    database String
    ClickHouse database name.
    resource String
    Resource id.
    schemas List<Property Map>
    ClickHouse schema.
    table String
    ClickHouse table.
    dropCls Property Map
    When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is ignored.
    dropInvalidMessage Boolean
    Whether ClickHouse discards the message that fails to parse; defaults to true.
    ip String
    ClickHouse IP.
    password String
    ClickHouse password.
    port Number
    ClickHouse port.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    serviceVip String
    Instance vip.
    type String
    Type of the table column.
    uniqVpcId String
    Instance vpc id.
    userName String
    ClickHouse user name.
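
    A ClickHouse target combines connection details with an explicit column schema. A hedged sketch (the cluster, network ids, credentials, and columns are all placeholders; in real code the password should come from Pulumi config or a secret):

    // Hypothetical ClickHouse target param with a two-column schema.
    const clickHouseParam = {
        resource: "resource-xxxxxxxx", // placeholder resource id
        cluster: "default_cluster",
        database: "analytics",
        table: "events",
        selfBuilt: false,              // a cloud instance rather than self-built
        serviceVip: "10.0.0.10",       // instance vip (placeholder)
        uniqVpcId: "vpc-xxxxxxxx",     // instance vpc id (placeholder)
        userName: "default",
        password: "change-me",         // placeholder; use a secret in practice
        schemas: [
            { columnName: "event_id", jsonKey: "id",   type: "UInt64", allowNull: false },
            { columnName: "payload",  jsonKey: "data", type: "String", allowNull: true },
        ],
    };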

    CkafkaDatahubTaskTargetResourceClickHouseParamDropCls, CkafkaDatahubTaskTargetResourceClickHouseParamDropClsArgs

    DropClsLogSet string
    cls LogSet id.
    DropClsOwneruin string
    Account.
    DropClsRegion string
    The region where the cls is delivered.
    DropClsTopicId string
    cls topic id.
    DropInvalidMessageToCls bool
    Whether to deliver to cls.
    DropClsLogSet string
    cls LogSet id.
    DropClsOwneruin string
    Account.
    DropClsRegion string
    The region where the cls is delivered.
    DropClsTopicId string
    cls topic id.
    DropInvalidMessageToCls bool
    Whether to deliver to cls.
    dropClsLogSet String
    cls LogSet id.
    dropClsOwneruin String
    Account.
    dropClsRegion String
    The region where the cls is delivered.
    dropClsTopicId String
    cls topic id.
    dropInvalidMessageToCls Boolean
    Whether to deliver to cls.
    dropClsLogSet string
    cls LogSet id.
    dropClsOwneruin string
    Account.
    dropClsRegion string
    The region where the cls is delivered.
    dropClsTopicId string
    cls topic id.
    dropInvalidMessageToCls boolean
    Whether to deliver to cls.
    drop_cls_log_set str
    cls LogSet id.
    drop_cls_owneruin str
    Account.
    drop_cls_region str
    The region where the cls is delivered.
    drop_cls_topic_id str
    cls topic id.
    drop_invalid_message_to_cls bool
    Whether to deliver to cls.
    dropClsLogSet String
    cls LogSet id.
    dropClsOwneruin String
    Account.
    dropClsRegion String
    The region where the cls is delivered.
    dropClsTopicId String
    cls topic id.
    dropInvalidMessageToCls Boolean
    Whether to deliver to cls.

    CkafkaDatahubTaskTargetResourceClickHouseParamSchema, CkafkaDatahubTaskTargetResourceClickHouseParamSchemaArgs

    AllowNull bool
    Whether the column item is allowed to be empty.
    ColumnName string
    column name.
    JsonKey string
    The json Key name corresponding to this column.
    Type string
    type of table column.
    AllowNull bool
    Whether the column item is allowed to be empty.
    ColumnName string
    column name.
    JsonKey string
    The json Key name corresponding to this column.
    Type string
    type of table column.
    allowNull Boolean
    Whether the column item is allowed to be empty.
    columnName String
    column name.
    jsonKey String
    The json Key name corresponding to this column.
    type String
    type of table column.
    allowNull boolean
    Whether the column item is allowed to be empty.
    columnName string
    column name.
    jsonKey string
    The json Key name corresponding to this column.
    type string
    type of table column.
    allow_null bool
    Whether the column item is allowed to be empty.
    column_name str
    column name.
    json_key str
    The json Key name corresponding to this column.
    type str
    type of table column.
    allowNull Boolean
    Whether the column item is allowed to be empty.
    columnName String
    column name.
    jsonKey String
    The json Key name corresponding to this column.
    type String
    type of table column.

    CkafkaDatahubTaskTargetResourceClsParam, CkafkaDatahubTaskTargetResourceClsParamArgs

    DecodeJson bool
    Whether the produced information is in json format.
    Resource string
    cls id.
    ContentKey string
    Required when Decode Json is false.
    LogSet string
    LogSet id.
    TimeField string
    Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.
    DecodeJson bool
    Whether the produced information is in json format.
    Resource string
    cls id.
    ContentKey string
    Required when Decode Json is false.
    LogSet string
    LogSet id.
    TimeField string
    Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.
    decodeJson Boolean
    Whether the produced information is in json format.
    resource String
    cls id.
    contentKey String
    Required when Decode Json is false.
    logSet String
    LogSet id.
    timeField String
    Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.
    decodeJson boolean
    Whether the produced information is in json format.
    resource string
    cls id.
    contentKey string
    Required when Decode Json is false.
    logSet string
    LogSet id.
    timeField string
    Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.
    decode_json bool
    Whether the produced information is in json format.
    resource str
    cls id.
    content_key str
    Required when Decode Json is false.
    log_set str
    LogSet id.
    time_field str
    Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.
    decodeJson Boolean
    Whether the produced information is in json format.
    resource String
    cls id.
    contentKey String
    Required when Decode Json is false.
    logSet String
    LogSet id.
    timeField String
    Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.
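
    For orientation, a hedged TypeScript fragment shaped like this type; it would plug into the targetResource of a CkafkaDatahubTask. Ids and the CLS type value are placeholders, not values taken from this page:

    // Hypothetical CLS target fragment; all ids are placeholders.
    const clsTarget = {
        type: "CLS", // assumed discriminator for clsParam
        clsParam: {
            resource: "cls-xxxxxxxx",  // cls id
            logSet: "logset-xxxxxxxx", // LogSet id
            decodeJson: false,
            contentKey: "content",     // required because decodeJson is false
            timeField: "ts",           // message field holding a second-granularity timestamp
        },
    };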

    CkafkaDatahubTaskTargetResourceCosParam, CkafkaDatahubTaskTargetResourceCosParamArgs

    BucketName string
    cos bucket name.
    Region string
    region code.
    AggregateBatchSize double
    The size of aggregated messages, in MB.
    AggregateInterval double
    time interval.
    DirectoryTimeFormat string
    Partition format, formatted according to strptime.
    FormatOutputType string
    The file format after message aggregation: csv|json.
    ObjectKey string
    ObjectKey.
    ObjectKeyPrefix string
    Dumped object directory prefix.
    BucketName string
    cos bucket name.
    Region string
    region code.
    AggregateBatchSize float64
    The size of aggregated messages, in MB.
    AggregateInterval float64
    time interval.
    DirectoryTimeFormat string
    Partition format, formatted according to strptime.
    FormatOutputType string
    The file format after message aggregation: csv|json.
    ObjectKey string
    ObjectKey.
    ObjectKeyPrefix string
    Dumped object directory prefix.
    bucketName String
    cos bucket name.
    region String
    region code.
    aggregateBatchSize Double
    The size of aggregated messages, in MB.
    aggregateInterval Double
    time interval.
    directoryTimeFormat String
    Partition format, formatted according to strptime.
    formatOutputType String
    The file format after message aggregation: csv|json.
    objectKey String
    ObjectKey.
    objectKeyPrefix String
    Dumped object directory prefix.
    bucketName string
    cos bucket name.
    region string
    region code.
    aggregateBatchSize number
    The size of aggregated messages, in MB.
    aggregateInterval number
    time interval.
    directoryTimeFormat string
    Partition format, formatted according to strptime.
    formatOutputType string
    The file format after message aggregation: csv|json.
    objectKey string
    ObjectKey.
    objectKeyPrefix string
    Dumped object directory prefix.
    bucket_name str
    cos bucket name.
    region str
    region code.
    aggregate_batch_size float
    The size of aggregated messages, in MB.
    aggregate_interval float
    time interval.
    directory_time_format str
    Partition format, formatted according to strptime.
    format_output_type str
    The file format after message aggregation: csv|json.
    object_key str
    ObjectKey.
    object_key_prefix str
    Dumped object directory prefix.
    bucketName String
    cos bucket name.
    region String
    region code.
    aggregateBatchSize Number
    The size of aggregated messages, in MB.
    aggregateInterval Number
    time interval.
    directoryTimeFormat String
    Partition format, formatted according to strptime.
    formatOutputType String
    The file format after message aggregation: csv|json.
    objectKey String
    ObjectKey.
    objectKeyPrefix String
    Dumped object directory prefix.
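
    For orientation, a hedged TypeScript fragment for a COS target (bucket, region, and sizes are placeholders):

    // Hypothetical COS target fragment for the targetResource of a CkafkaDatahubTask.
    const cosTarget = {
        type: "COS", // assumed discriminator for cosParam
        cosParam: {
            bucketName: "demo-bucket-1250000000", // placeholder
            region: "ap-guangzhou",               // placeholder region code
            aggregateBatchSize: 5,                // aggregate up to 5 MB per object
            aggregateInterval: 60,                // placeholder time interval
            directoryTimeFormat: "%Y-%m-%d",      // strptime-style partition format
            formatOutputType: "json",             // csv|json
            objectKeyPrefix: "ckafka/",           // dumped object directory prefix
        },
    };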

    CkafkaDatahubTaskTargetResourceCtsdbParam, CkafkaDatahubTaskTargetResourceCtsdbParamArgs

    CtsdbMetric string
    Ctsdb metric.
    Resource string
    resource id.
    CtsdbMetric string
    Ctsdb metric.
    Resource string
    resource id.
    ctsdbMetric String
    Ctsdb metric.
    resource String
    resource id.
    ctsdbMetric string
    Ctsdb metric.
    resource string
    resource id.
    ctsdb_metric str
    Ctsdb metric.
    resource str
    resource id.
    ctsdbMetric String
    Ctsdb metric.
    resource String
    resource id.
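
    This type only needs the two fields above; a hedged TypeScript fragment (both values are placeholders):

    // Hypothetical Ctsdb target fragment for the targetResource of a CkafkaDatahubTask.
    const ctsdbTarget = {
        type: "CTSDB", // assumed discriminator for ctsdbParam
        ctsdbParam: {
            resource: "ctsdb-xxxxxxxx", // placeholder resource id
            ctsdbMetric: "demo_metric", // placeholder metric name
        },
    };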

    CkafkaDatahubTaskTargetResourceDtsParam, CkafkaDatahubTaskTargetResourceDtsParamArgs

    Resource string
    Dts instance Id.
    GroupId string
    Dts consumer group Id.
    GroupPassword string
    Dts consumer group password.
    GroupUser string
    Dts account.
    Ip string
    Dts connection ip.
    Port double
    Dts connection port.
    Topic string
    Dts topic.
    TranSql bool
    Set to false to synchronize the original data, or true to synchronize the parsed json format data; defaults to true.
    Resource string
    Dts instance Id.
    GroupId string
    Dts consumer group Id.
    GroupPassword string
    Dts consumer group password.
    GroupUser string
    Dts account.
    Ip string
    Dts connection ip.
    Port float64
    Dts connection port.
    Topic string
    Dts topic.
    TranSql bool
    Set to false to synchronize the original data, or true to synchronize the parsed json format data; defaults to true.
    resource String
    Dts instance Id.
    groupId String
    Dts consumer group Id.
    groupPassword String
    Dts consumer group password.
    groupUser String
    Dts account.
    ip String
    Dts connection ip.
    port Double
    Dts connection port.
    topic String
    Dts topic.
    tranSql Boolean
    Set to false to synchronize the original data, or true to synchronize the parsed json format data; defaults to true.
    resource string
    Dts instance Id.
    groupId string
    Dts consumer group Id.
    groupPassword string
    Dts consumer group password.
    groupUser string
    Dts account.
    ip string
    Dts connection ip.
    port number
    Dts connection port.
    topic string
    Dts topic.
    tranSql boolean
    Set to false to synchronize the original data, or true to synchronize the parsed json format data; defaults to true.
    resource str
    Dts instance Id.
    group_id str
    Dts consumer group Id.
    group_password str
    Dts consumer group password.
    group_user str
    Dts account.
    ip str
    Dts connection ip.
    port float
    Dts connection port.
    topic str
    Dts topic.
    tran_sql bool
    Set to false to synchronize the original data, or true to synchronize the parsed json format data; defaults to true.
    resource String
    Dts instance Id.
    groupId String
    Dts consumer group Id.
    groupPassword String
    Dts consumer group password.
    groupUser String
    Dts account.
    ip String
    Dts connection ip.
    port Number
    Dts connection port.
    topic String
    Dts topic.
    tranSql Boolean
    Set to false to synchronize the original data, or true to synchronize the parsed json format data; defaults to true.
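
    For orientation, a hedged TypeScript fragment for a DTS target (connection details are placeholders):

    // Hypothetical DTS target fragment for the targetResource of a CkafkaDatahubTask.
    const dtsTarget = {
        type: "DTS", // assumed discriminator for dtsParam
        dtsParam: {
            resource: "dts-xxxxxxxx",         // placeholder Dts instance id
            ip: "10.0.0.10",                  // placeholder connection ip
            port: 7093,                       // placeholder connection port
            topic: "demo-dts-topic",          // placeholder
            groupId: "consumer-grp-xxxxxxxx", // placeholder consumer group id
            groupUser: "dts_user",            // placeholder account
            groupPassword: "placeholder",     // placeholder password
            tranSql: true,                    // deliver parsed json rather than raw data
        },
    };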

    CkafkaDatahubTaskTargetResourceEsParam, CkafkaDatahubTaskTargetResourceEsParamArgs

    Resource string
    Resource.
    ContentKey string
    key for data in non-json format.
    DatabasePrimaryKey string
    When the message dumped to ES is a database binlog and you need to synchronize database operations (insert, delete, update) to ES, fill in the primary key of the database table.
    DateFormat string
    Es date suffix.
    DocumentIdField string
    The field name of the document ID value dumped into Es.
    DropCls CkafkaDatahubTaskTargetResourceEsParamDropCls
    When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is ignored.
    DropDlq CkafkaDatahubTaskTargetResourceEsParamDropDlq
    dead letter queue.
    DropInvalidJsonMessage bool
    Whether Es discards messages in non-json format.
    DropInvalidMessage bool
    Whether Es discards messages that fail to parse.
    Index string
    Es index name.
    IndexType string
    Es custom index name type: STRING or JSONPATH; defaults to STRING.
    Password string
    Es Password.
    Port double
    Es connection port.
    SelfBuilt bool
    Whether it is a self-built cluster.
    ServiceVip string
    instance vip.
    UniqVpcId string
    instance vpc id.
    UserName string
    Es UserName.
    Resource string
    Resource.
    ContentKey string
    key for data in non-json format.
    DatabasePrimaryKey string
    When the message dumped to ES is a database binlog and you need to synchronize database operations (insert, delete, update) to ES, fill in the primary key of the database table.
    DateFormat string
    Es date suffix.
    DocumentIdField string
    The field name of the document ID value dumped into Es.
    DropCls CkafkaDatahubTaskTargetResourceEsParamDropCls
    When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is ignored.
    DropDlq CkafkaDatahubTaskTargetResourceEsParamDropDlq
    dead letter queue.
    DropInvalidJsonMessage bool
    Whether Es discards messages in non-json format.
    DropInvalidMessage bool
    Whether Es discards messages that fail to parse.
    Index string
    Es index name.
    IndexType string
    Es custom index name type: STRING or JSONPATH; defaults to STRING.
    Password string
    Es Password.
    Port float64
    Es connection port.
    SelfBuilt bool
    Whether it is a self-built cluster.
    ServiceVip string
    instance vip.
    UniqVpcId string
    instance vpc id.
    UserName string
    Es UserName.
    resource String
    Resource.
    contentKey String
    key for data in non-json format.
    databasePrimaryKey String
    When the message dumped to ES is a database binlog and you need to synchronize database operations (insert, delete, update) to ES, fill in the primary key of the database table.
    dateFormat String
    Es date suffix.
    documentIdField String
    The field name of the document ID value dumped into Es.
    dropCls CkafkaDatahubTaskTargetResourceEsParamDropCls
    When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is ignored.
    dropDlq CkafkaDatahubTaskTargetResourceEsParamDropDlq
    dead letter queue.
    dropInvalidJsonMessage Boolean
    Whether Es discards messages in non-json format.
    dropInvalidMessage Boolean
    Whether Es discards messages that fail to parse.
    index String
    Es index name.
    indexType String
    Es custom index name type: STRING or JSONPATH; defaults to STRING.
    password String
    Es Password.
    port Double
    Es connection port.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    serviceVip String
    instance vip.
    uniqVpcId String
    instance vpc id.
    userName String
    Es UserName.
    resource string
    Resource.
    contentKey string
    key for data in non-json format.
    databasePrimaryKey string
    When the message dumped to ES is a database binlog and you need to synchronize database operations (insert, delete, update) to ES, fill in the primary key of the database table.
    dateFormat string
    Es date suffix.
    documentIdField string
    The field name of the document ID value dumped into Es.
    dropCls CkafkaDatahubTaskTargetResourceEsParamDropCls
    When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is ignored.
    dropDlq CkafkaDatahubTaskTargetResourceEsParamDropDlq
    dead letter queue.
    dropInvalidJsonMessage boolean
    Whether Es discards messages in non-json format.
    dropInvalidMessage boolean
    Whether Es discards messages that fail to parse.
    index string
    Es index name.
    indexType string
    Es custom index name type: STRING or JSONPATH; defaults to STRING.
    password string
    Es Password.
    port number
    Es connection port.
    selfBuilt boolean
    Whether it is a self-built cluster.
    serviceVip string
    instance vip.
    uniqVpcId string
    instance vpc id.
    userName string
    Es UserName.
    resource str
    Resource.
    content_key str
    key for data in non-json format.
    database_primary_key str
    When the message dumped to ES is a database binlog and you need to synchronize database operations (insert, delete, update) to ES, fill in the primary key of the database table.
    date_format str
    Es date suffix.
    document_id_field str
    The field name of the document ID value dumped into Es.
    drop_cls CkafkaDatahubTaskTargetResourceEsParamDropCls
    When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is ignored.
    drop_dlq CkafkaDatahubTaskTargetResourceEsParamDropDlq
    dead letter queue.
    drop_invalid_json_message bool
    Whether Es discards messages in non-json format.
    drop_invalid_message bool
    Whether Es discards messages that fail to parse.
    index str
    Es index name.
    index_type str
    Es custom index name type: STRING or JSONPATH; defaults to STRING.
    password str
    Es Password.
    port float
    Es connection port.
    self_built bool
    Whether it is a self-built cluster.
    service_vip str
    instance vip.
    uniq_vpc_id str
    instance vpc id.
    user_name str
    Es UserName.
    resource String
    Resource.
    contentKey String
    key for data in non-json format.
    databasePrimaryKey String
    When the message dumped to ES is a database binlog and you need to synchronize database operations (insert, delete, update) to ES, fill in the primary key of the database table.
    dateFormat String
    Es date suffix.
    documentIdField String
    The field name of the document ID value dumped into Es.
    dropCls Property Map
    When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is ignored.
    dropDlq Property Map
    dead letter queue.
    dropInvalidJsonMessage Boolean
    Whether Es discards messages in non-json format.
    dropInvalidMessage Boolean
    Whether Es discards messages that fail to parse.
    index String
    Es index name.
    indexType String
    Es custom index name type: STRING or JSONPATH; defaults to STRING.
    password String
    Es Password.
    port Number
    Es connection port.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    serviceVip String
    instance vip.
    uniqVpcId String
    instance vpc id.
    userName String
    Es UserName.
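
    For orientation, a hedged TypeScript fragment for an ES target (endpoint details are placeholders):

    // Hypothetical ES target fragment for the targetResource of a CkafkaDatahubTask.
    const esTarget = {
        type: "ES", // assumed discriminator for esParam
        esParam: {
            resource: "es-xxxxxxxx",       // placeholder
            index: "demo-index",
            indexType: "STRING",           // STRING|JSONPATH
            port: 9200,                    // placeholder
            userName: "elastic",           // placeholder
            password: "placeholder",
            selfBuilt: false,
            dropInvalidMessage: true,      // discard messages that fail to parse
            dropInvalidJsonMessage: false,
            dateFormat: "yyyy-MM-dd",      // placeholder date suffix
        },
    };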

    CkafkaDatahubTaskTargetResourceEsParamDropCls, CkafkaDatahubTaskTargetResourceEsParamDropClsArgs

    DropClsLogSet string
    cls LogSet id.
    DropClsOwneruin string
    cls account owner uin.
    DropClsRegion string
    The region where the cls is delivered.
    DropClsTopicId string
    cls topic.
    DropInvalidMessageToCls bool
    Whether to deliver to cls.
    DropClsLogSet string
    cls LogSet id.
    DropClsOwneruin string
    cls account owner uin.
    DropClsRegion string
    The region where the cls is delivered.
    DropClsTopicId string
    cls topic.
    DropInvalidMessageToCls bool
    Whether to deliver to cls.
    dropClsLogSet String
    cls LogSet id.
    dropClsOwneruin String
    cls account owner uin.
    dropClsRegion String
    The region where the cls is delivered.
    dropClsTopicId String
    cls topic.
    dropInvalidMessageToCls Boolean
    Whether to deliver to cls.
    dropClsLogSet string
    cls LogSet id.
    dropClsOwneruin string
    cls account owner uin.
    dropClsRegion string
    The region where the cls is delivered.
    dropClsTopicId string
    cls topic.
    dropInvalidMessageToCls boolean
    Whether to deliver to cls.
    drop_cls_log_set str
    cls LogSet id.
    drop_cls_owneruin str
    cls account owner uin.
    drop_cls_region str
    The region where the cls is delivered.
    drop_cls_topic_id str
    cls topic.
    drop_invalid_message_to_cls bool
    Whether to deliver to cls.
    dropClsLogSet String
    cls LogSet id.
    dropClsOwneruin String
    cls account owner uin.
    dropClsRegion String
    The region where the cls is delivered.
    dropClsTopicId String
    cls topic.
    dropInvalidMessageToCls Boolean
    Whether to deliver to cls.

    CkafkaDatahubTaskTargetResourceEsParamDropDlq, CkafkaDatahubTaskTargetResourceEsParamDropDlqArgs

    Type string
    Type: DLQ (dead letter queue), IGNORE_ERROR, or DROP.
    DlqType string
    dlq type, CKAFKA|TOPIC.
    KafkaParam CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParam
    Ckafka type dlq.
    MaxRetryAttempts double
    retry times.
    RetryInterval double
    retry interval.
    TopicParam CkafkaDatahubTaskTargetResourceEsParamDropDlqTopicParam
    DIP Topic type dead letter queue.
    Type string
    Type: DLQ (dead letter queue), IGNORE_ERROR, or DROP.
    DlqType string
    dlq type, CKAFKA|TOPIC.
    KafkaParam CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParam
    Ckafka type dlq.
    MaxRetryAttempts float64
    retry times.
    RetryInterval float64
    retry interval.
    TopicParam CkafkaDatahubTaskTargetResourceEsParamDropDlqTopicParam
    DIP Topic type dead letter queue.
    type String
    Type: DLQ (dead letter queue), IGNORE_ERROR, or DROP.
    dlqType String
    dlq type, CKAFKA|TOPIC.
    kafkaParam CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParam
    Ckafka type dlq.
    maxRetryAttempts Double
    retry times.
    retryInterval Double
    retry interval.
    topicParam CkafkaDatahubTaskTargetResourceEsParamDropDlqTopicParam
    DIP Topic type dead letter queue.
    type string
    Type: DLQ (dead letter queue), IGNORE_ERROR, or DROP.
    dlqType string
    dlq type, CKAFKA|TOPIC.
    kafkaParam CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParam
    Ckafka type dlq.
    maxRetryAttempts number
    retry times.
    retryInterval number
    retry interval.
    topicParam CkafkaDatahubTaskTargetResourceEsParamDropDlqTopicParam
    DIP Topic type dead letter queue.
    type str
    Type: DLQ (dead letter queue), IGNORE_ERROR, or DROP.
    dlq_type str
    dlq type, CKAFKA|TOPIC.
    kafka_param CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParam
    Ckafka type dlq.
    max_retry_attempts float
    retry times.
    retry_interval float
    retry interval.
    topic_param CkafkaDatahubTaskTargetResourceEsParamDropDlqTopicParam
    DIP Topic type dead letter queue.
    type String
    Type: DLQ (dead letter queue), IGNORE_ERROR, or DROP.
    dlqType String
    dlq type, CKAFKA|TOPIC.
    kafkaParam Property Map
    Ckafka type dlq.
    maxRetryAttempts Number
    retry times.
    retryInterval Number
    retry interval.
    topicParam Property Map
    DIP Topic type dead letter queue.
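
    For orientation, a hedged TypeScript fragment of a dead letter queue attached to esParam via dropDlq (all values are placeholders):

    // Hypothetical DLQ fragment: failed messages retried, then routed to a DIP topic.
    const dropDlq = {
        type: "DLQ",         // DLQ|IGNORE_ERROR|DROP
        dlqType: "TOPIC",    // CKAFKA|TOPIC
        maxRetryAttempts: 3, // placeholder retry times
        retryInterval: 5000, // placeholder retry interval
        topicParam: {
            resource: "demo-dlq-topic", // placeholder topic name
            compressionType: "none",
            useAutoCreateTopic: false,
        },
    };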

    CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParam, CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParamArgs

    Resource string
    instance resource.
    SelfBuilt bool
    whether the cluster is self-built rather than a cloud product.
    CompressionType string
    Whether to compress when writing to the Topic; fill in none to disable compression, or open to enable it.
    EnableToleration bool
    enable dead letter queue.
    MsgMultiple double
    Each source topic message is amplified msg Multiple times and written to the target topic (currently only applicable to ckafka flowing into ckafka).
    OffsetType string
    Offset type: earliest (from beginning), latest (from latest), timestamp (from a specific time).
    PartitionNum double
    the partition num of the topic.
    QpsLimit double
    Qps (queries per second) limit.
    ResourceName string
    instance name.
    StartTime double
    Required when the Offset type is timestamp.
    TableMappings List<CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParamTableMapping>
    maps of table to topic, required when multi topic is selected.
    Topic string
    Topic name.
    TopicId string
    Topic ID.
    UseAutoCreateTopic bool
    Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, fill in the name of the topic to be created in the Topic field).
    UseTableMapping bool
    whether to use multi table.
    ZoneId double
    Zone ID.
    Resource string
    instance resource.
    SelfBuilt bool
    whether the cluster is self-built rather than a cloud product.
    CompressionType string
    Whether to compress when writing to the Topic; fill in none to disable compression, or open to enable it.
    EnableToleration bool
    enable dead letter queue.
    MsgMultiple float64
    Each source topic message is amplified msg Multiple times and written to the target topic (currently only applicable to ckafka flowing into ckafka).
    OffsetType string
    Offset type: earliest (from beginning), latest (from latest), timestamp (from a specific time).
    PartitionNum float64
    the partition num of the topic.
    QpsLimit float64
    Qps (queries per second) limit.
    ResourceName string
    instance name.
    StartTime float64
    Required when the Offset type is timestamp.
    TableMappings []CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParamTableMapping
    maps of table to topic, required when multi topic is selected.
    Topic string
    Topic name.
    TopicId string
    Topic ID.
    UseAutoCreateTopic bool
    Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, fill in the name of the topic to be created in the Topic field).
    UseTableMapping bool
    whether to use multi table.
    ZoneId float64
    Zone ID.
    resource String
    instance resource.
    selfBuilt Boolean
    whether the cluster is self-built rather than a cloud product.
    compressionType String
    Whether to compress when writing to the Topic; fill in none to disable compression, or open to enable it.
    enableToleration Boolean
    enable dead letter queue.
    msgMultiple Double
    Each source topic message is amplified msg Multiple times and written to the target topic (currently only applicable to ckafka flowing into ckafka).
    offsetType String
    Offset type: earliest (from beginning), latest (from latest), timestamp (from a specific time).
    partitionNum Double
    the partition num of the topic.
    qpsLimit Double
    Qps (queries per second) limit.
    resourceName String
    instance name.
    startTime Double
    Required when the Offset type is timestamp.
    tableMappings List<CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParamTableMapping>
    maps of table to topic, required when multi topic is selected.
    topic String
    Topic name.
    topicId String
    Topic ID.
    useAutoCreateTopic Boolean
    Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, fill in the name of the topic to be created in the Topic field).
    useTableMapping Boolean
    whether to use multi table.
    zoneId Double
    Zone ID.
    resource string
    instance resource.
    selfBuilt boolean
    whether the cluster is self-built rather than a cloud product.
    compressionType string
    Whether to compress when writing to the Topic; fill in none to disable compression, or open to enable it.
    enableToleration boolean
    enable dead letter queue.
    msgMultiple number
    Each source topic message is amplified msg Multiple times and written to the target topic (currently only applicable to ckafka flowing into ckafka).
    offsetType string
    Offset type: earliest (from beginning), latest (from latest), timestamp (from a specific time).
    partitionNum number
    the partition num of the topic.
    qpsLimit number
    Qps (queries per second) limit.
    resourceName string
    instance name.
    startTime number
    Required when the Offset type is timestamp.
    tableMappings CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParamTableMapping[]
    maps of table to topic, required when multi topic is selected.
    topic string
    Topic name.
    topicId string
    Topic ID.
    useAutoCreateTopic boolean
    Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, fill in the name of the topic to be created in the Topic field).
    useTableMapping boolean
    whether to use multi table.
    zoneId number
    Zone ID.
    resource str
    instance resource.
    self_built bool
    whether the cluster is self-built rather than a cloud product.
    compression_type str
    Whether to compress when writing to the Topic; fill in none to disable compression, or open to enable it.
    enable_toleration bool
    enable dead letter queue.
    msg_multiple float
    Each source topic message is amplified msg Multiple times and written to the target topic (currently only applicable to ckafka flowing into ckafka).
    offset_type str
    Offset type: earliest (from beginning), latest (from latest), timestamp (from a specific time).
    partition_num float
    the partition num of the topic.
    qps_limit float
    Qps (queries per second) limit.
    resource_name str
    instance name.
    start_time float
    Required when the Offset type is timestamp.
    table_mappings Sequence[CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParamTableMapping]
    maps of table to topic, required when multi topic is selected.
    topic str
    Topic name.
    topic_id str
    Topic ID.
    use_auto_create_topic bool
    Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, fill in the name of the topic to be created in the Topic field).
    use_table_mapping bool
    whether to use multi table.
    zone_id float
    Zone ID.
    resource String
    instance resource.
    selfBuilt Boolean
    whether the cluster is self-built rather than a cloud product.
    compressionType String
    Whether to compress when writing to the Topic; fill in none to disable compression, or open to enable it.
    enableToleration Boolean
    enable dead letter queue.
    msgMultiple Number
    Each source topic message is amplified msg Multiple times and written to the target topic (currently only applicable to ckafka flowing into ckafka).
    offsetType String
    Offset type: earliest (from beginning), latest (from latest), timestamp (from a specific time).
    partitionNum Number
    the partition num of the topic.
    qpsLimit Number
    Qps (queries per second) limit.
    resourceName String
    instance name.
    startTime Number
    Required when the Offset type is timestamp.
    tableMappings List<Property Map>
    maps of table to topic, required when multi topic is selected.
    topic String
    Topic name.
    topicId String
    Topic ID.
    useAutoCreateTopic Boolean
    Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, fill in the name of the topic to be created in the Topic field).
    useTableMapping Boolean
    whether to use multi table.
    zoneId Number
    Zone ID.

    CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParamTableMapping, CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParamTableMappingArgs

    Database string
    database name.
    Table string
    table name; use , (comma) to separate multiple tables.
    Topic string
    Topic name.
    TopicId string
    Topic ID.
    Database string
    database name.
    Table string
    table name; use , (comma) to separate multiple tables.
    Topic string
    Topic name.
    TopicId string
    Topic ID.
    database String
    database name.
    table String
    table name; use , (comma) to separate multiple tables.
    topic String
    Topic name.
    topicId String
    Topic ID.
    database string
    database name.
    table string
    table name; use , (comma) to separate multiple tables.
    topic string
    Topic name.
    topicId string
    Topic ID.
    database str
    database name.
    table str
    table name; use , (comma) to separate multiple tables.
    topic str
    Topic name.
    topic_id str
    Topic ID.
    database String
    database name.
    table String
    table name; use , (comma) to separate multiple tables.
    topic String
    Topic name.
    topicId String
    Topic ID.

    CkafkaDatahubTaskTargetResourceEsParamDropDlqTopicParam, CkafkaDatahubTaskTargetResourceEsParamDropDlqTopicParamArgs

    Resource string
    The topic name of the separately sold topic.
    CompressionType string
    Whether to compress when writing to the topic; fill in none to disable, or choose one of gzip, snappy, or lz4 to enable.
    MsgMultiple double
    Each source topic message is amplified msg Multiple times and written to the target topic (currently only applicable to ckafka flowing into ckafka).
    OffsetType string
    Offset type: earliest (initial position), latest (latest position), timestamp (point in time).
    StartTime double
    Required when the Offset type is timestamp; pass a timestamp accurate to the second.
    TopicId string
    Topic TopicId.
    UseAutoCreateTopic bool
    Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks).
    Resource string
    The topic name of the separately sold topic.
    CompressionType string
    Whether to compress when writing to the topic; fill in none to disable, or choose one of gzip, snappy, or lz4 to enable.
    MsgMultiple float64
    Each source topic message is amplified msg Multiple times and written to the target topic (currently only applicable to ckafka flowing into ckafka).
    OffsetType string
    Offset type: earliest (initial position), latest (latest position), timestamp (point in time).
    StartTime float64
    Required when the Offset type is timestamp; pass a timestamp accurate to the second.
    TopicId string
    Topic TopicId.
    UseAutoCreateTopic bool
    Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks).
    resource String
    The topic name of the separately sold topic.
    compressionType String
    Whether to compress when writing to the topic; fill in none to disable, or choose one of gzip, snappy, or lz4 to enable.
    msgMultiple Double
    Each source topic message is amplified msg Multiple times and written to the target topic (currently only applicable to ckafka flowing into ckafka).
    offsetType String
    Offset type: earliest (initial position), latest (latest position), timestamp (point in time).
    startTime Double
    Required when the Offset type is timestamp; pass a timestamp accurate to the second.
    topicId String
    Topic TopicId.
    useAutoCreateTopic Boolean
    Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks).
    resource string
    The topic name of the separately sold topic.
    compressionType string
    Whether to compress when writing to the topic; fill in none to disable, or choose one of gzip, snappy, or lz4 to enable.
    msgMultiple number
    Each source topic message is amplified msg Multiple times and written to the target topic (currently only applicable to ckafka flowing into ckafka).
    offsetType string
    Offset type: earliest (initial position), latest (latest position), timestamp (point in time).
    startTime number
    Required when the Offset type is timestamp; pass a timestamp accurate to the second.
    topicId string
    Topic TopicId.
    useAutoCreateTopic boolean
    Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks).
    resource str
    The topic name of the separately sold topic.
    compression_type str
    Whether to compress when writing to the topic; fill in none to disable, or choose one of gzip, snappy, or lz4 to enable.
    msg_multiple float
    Each source topic message is amplified msg Multiple times and written to the target topic (currently only applicable to ckafka flowing into ckafka).
    offset_type str
    Offset type: earliest (initial position), latest (latest position), timestamp (point in time).
    start_time float
    Required when the Offset type is timestamp; pass a timestamp accurate to the second.
    topic_id str
    Topic TopicId.
    use_auto_create_topic bool
    Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks).
    resource String
    The topic name of the separately sold topic.
    compressionType String
    Whether to compress when writing to the topic; fill in none to disable, or choose one of gzip, snappy, or lz4 to enable.
    msgMultiple Number
    Each source topic message is amplified msg Multiple times and written to the target topic (currently only applicable to ckafka flowing into ckafka).
    offsetType String
    Offset type: earliest (initial position), latest (latest position), timestamp (point in time).
    startTime Number
    Required when the Offset type is timestamp; pass a timestamp accurate to the second.
    topicId String
    Topic TopicId.
    useAutoCreateTopic Boolean
    Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks).

    CkafkaDatahubTaskTargetResourceEventBusParam, CkafkaDatahubTaskTargetResourceEventBusParamArgs

    Resource string
    instance id.
    SelfBuilt bool
    Whether it is a self-built cluster.
    Type string
    resource type. EB_COS/EB_ES/EB_CLS.
    FunctionName string
    SCF function name.
    Namespace string
    SCF namespace.
    Qualifier string
    SCF version and alias.
    Resource string
    instance id.
    SelfBuilt bool
    Whether it is a self-built cluster.
    Type string
    resource type. EB_COS/EB_ES/EB_CLS.
    FunctionName string
    SCF function name.
    Namespace string
    SCF namespace.
    Qualifier string
    SCF version and alias.
    resource String
    instance id.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    type String
    resource type. EB_COS/EB_ES/EB_CLS.
    functionName String
    SCF function name.
    namespace String
    SCF namespace.
    qualifier String
    SCF version and alias.
    resource string
    instance id.
    selfBuilt boolean
    Whether it is a self-built cluster.
    type string
    resource type. EB_COS/EB_ES/EB_CLS.
    functionName string
    SCF function name.
    namespace string
    SCF namespace.
    qualifier string
    SCF version and alias.
    resource str
    instance id.
    self_built bool
    Whether it is a self-built cluster.
    type str
    resource type. EB_COS/EB_ES/EB_CLS.
    function_name str
    SCF function name.
    namespace str
    SCF namespace.
    qualifier str
    SCF version and alias.
    resource String
    instance id.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    type String
    resource type. EB_COS/EB_ES/EB_CLS.
    functionName String
    SCF function name.
    namespace String
    SCF namespace.
    qualifier String
    SCF version and alias.
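
    For orientation, a hedged TypeScript fragment for an EventBus target (ids and the EVENT_BUS value are placeholders):

    // Hypothetical EventBus target fragment for the targetResource of a CkafkaDatahubTask.
    const eventBusTarget = {
        type: "EVENT_BUS", // assumed discriminator for eventBusParam
        eventBusParam: {
            resource: "eb-xxxxxxxx",           // placeholder instance id
            type: "EB_COS",                    // EB_COS/EB_ES/EB_CLS
            selfBuilt: false,
            functionName: "demo-scf-function", // placeholder SCF function
            namespace: "default",
            qualifier: "$LATEST",              // placeholder SCF version/alias
        },
    };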

    CkafkaDatahubTaskTargetResourceKafkaParam, CkafkaDatahubTaskTargetResourceKafkaParamArgs

    Resource string
    instance resource.
    SelfBuilt bool
    whether the cluster is self-built rather than a cloud product.
    CompressionType string
    Whether to compress when writing to the Topic; fill in none to disable compression, or open to enable it.
    EnableToleration bool
    enable dead letter queue.
    MsgMultiple double
    Each source topic message is amplified msg Multiple times and written to the target topic (currently only applicable to ckafka flowing into ckafka).
    OffsetType string
    Offset type: earliest (from beginning), latest (from latest), timestamp (from a specific time).
    PartitionNum double
    the partition num of the topic.
    QpsLimit double
    Qps (queries per second) limit.
    ResourceName string
    instance name.
    StartTime double
    Required when the Offset type is timestamp.
    TableMappings List<CkafkaDatahubTaskTargetResourceKafkaParamTableMapping>
    maps of table to topic, required when multi topic is selected.
    Topic string
    Topic name.
    TopicId string
    Topic ID.
    UseAutoCreateTopic bool
    Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, fill in the name of the topic to be created in the Topic field).
    UseTableMapping bool
    whether to use multi table.
    ZoneId double
    Zone ID.
    Resource string
    instance resource.
    SelfBuilt bool
    whether the cluster is self-built rather than a cloud product.
    CompressionType string
    Whether to compress when writing to the Topic; fill in none to disable compression, or open to enable it.
    EnableToleration bool
    enable dead letter queue.
    MsgMultiple float64
    Each source topic message is amplified msg Multiple times and written to the target topic (currently only applicable to ckafka flowing into ckafka).
    OffsetType string
    Offset type: earliest (from beginning), latest (from latest), timestamp (from a specific time).
    PartitionNum float64
    the partition num of the topic.
    QpsLimit float64
    Qps (queries per second) limit.
    ResourceName string
    instance name.
    StartTime float64
    Required when the Offset type is timestamp.
    TableMappings []CkafkaDatahubTaskTargetResourceKafkaParamTableMapping
    maps of table to topic, required when multi topic is selected.
    Topic string
    Topic name.
    TopicId string
    Topic ID.
    UseAutoCreateTopic bool
    Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, fill in the name of the topic to be created in the Topic field).
    UseTableMapping bool
    whether to use multi table.
    ZoneId float64
    Zone ID.
    resource String
    instance resource.
    selfBuilt Boolean
    whether the cluster is self-built rather than a cloud product.
    compressionType String
    Whether to compress when writing to the Topic; fill in none to disable compression, or open to enable it.
    enableToleration Boolean
    enable dead letter queue.
    msgMultiple Double
    Each source topic message is amplified msg Multiple times and written to the target topic (currently only applicable to ckafka flowing into ckafka).
    offsetType String
    Offset type: earliest (from beginning), latest (from latest), timestamp (from a specific time).
    partitionNum Double
    the partition num of the topic.
    qpsLimit Double
    Qps (queries per second) limit.
    resourceName String
    instance name.
    startTime Double
    Required when the Offset type is timestamp.
    tableMappings List<CkafkaDatahubTaskTargetResourceKafkaParamTableMapping>
    maps of table to topic, required when multi topic is selected.
    topic String
    Topic name.
    topicId String
    Topic ID.
    useAutoCreateTopic Boolean
    Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, fill in the name of the topic to be created in the Topic field).
    useTableMapping Boolean
    whether to use multi table.
    zoneId Double
    Zone ID.
    resource string
    instance resource.
    selfBuilt boolean
    whether the cluster is self-built rather than a cloud product.
    compressionType string
    Whether to compress when writing to the Topic; fill in none to disable compression, or open to enable it.
    enableToleration boolean
    enable dead letter queue.
    msgMultiple number
    Each source topic message is amplified msg Multiple times and written to the target topic (currently only applicable to ckafka flowing into ckafka).
    offsetType string
    Offset type: earliest (from beginning), latest (from latest), timestamp (from a specific time).
    partitionNum number
    the partition num of the topic.
    qpsLimit number
    Qps (queries per second) limit.
    resourceName string
    instance name.
    startTime number
    Required when the Offset type is timestamp.
    tableMappings CkafkaDatahubTaskTargetResourceKafkaParamTableMapping[]
    maps of table to topic, required when multi topic is selected.
    topic string
    Topic name.
    topicId string
    Topic ID.
    useAutoCreateTopic boolean
    Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, fill in the name of the topic to be created in the Topic field).
    useTableMapping boolean
    whether to use multi table.
    zoneId number
    Zone ID.
    resource str
    instance resource.
    self_built bool
    whether the cluster is self-built rather than a cloud product.
    compression_type str
    Whether to compress when writing to the Topic; fill in none to disable compression, or open to enable it.
    enable_toleration bool
    enable dead letter queue.
    msg_multiple float
    Each source topic message is amplified msg Multiple times and written to the target topic (currently only applicable to ckafka flowing into ckafka).
    offset_type str
    Offset type: earliest (from beginning), latest (from latest), timestamp (from a specific time).
    partition_num float
    the partition num of the topic.
    qps_limit float
    Qps (queries per second) limit.
    resource_name str
    instance name.
    start_time float
    Required when the Offset type is timestamp.
    table_mappings Sequence[CkafkaDatahubTaskTargetResourceKafkaParamTableMapping]
    maps of table to topic, required when multi topic is selected.
    topic str
    Topic name.
    topic_id str
    Topic ID.
    use_auto_create_topic bool
    Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, fill in the name of the topic to be created in the Topic field).
    use_table_mapping bool
    whether to use multi table.
    zone_id float
    Zone ID.
    resource String
    instance resource.
    selfBuilt Boolean
    whether the cluster is self-built rather than a cloud product.
    compressionType String
    Whether to compress when writing to the Topic; fill in none to disable compression, or open to enable it.
    enableToleration Boolean
    enable dead letter queue.
    msgMultiple Number
    Each source topic message is amplified msg Multiple times and written to the target topic (currently only applicable to ckafka flowing into ckafka).
    offsetType String
    Offset type: earliest (from beginning), latest (from latest), timestamp (from a specific time).
    partitionNum Number
    the partition num of the topic.
    qpsLimit Number
    Qps (queries per second) limit.
    resourceName String
    instance name.
    startTime Number
    Required when the Offset type is timestamp.
    tableMappings List<Property Map>
    maps of table to topic, required when multi topic is selected.
    topic String
    Topic name.
    topicId String
    Topic ID.
    useAutoCreateTopic Boolean
    Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, fill in the name of the topic to be created in the Topic field).
    useTableMapping Boolean
    whether to use multi table.
    zoneId Number
    Zone ID.
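
    For orientation, a hedged TypeScript fragment for a CKafka target using table mappings (all names and ids are placeholders):

    // Hypothetical Kafka target fragment for the targetResource of a CkafkaDatahubTask.
    const kafkaTarget = {
        type: "KAFKA", // assumed discriminator for kafkaParam
        kafkaParam: {
            resource: "ckafka-xxxxxxxx", // placeholder instance id
            selfBuilt: false,
            compressionType: "none",     // none|open
            offsetType: "earliest",      // earliest|latest|timestamp
            useTableMapping: true,
            tableMappings: [{
                database: "demo_db",
                table: "t1,t2",          // comma-separated table names
                topic: "demo-target-topic",
                topicId: "topic-xxxxxxxx",
            }],
        },
    };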

    CkafkaDatahubTaskTargetResourceKafkaParamTableMapping, CkafkaDatahubTaskTargetResourceKafkaParamTableMappingArgs

    Database string
    database name.
    Table string
    table name; use , (comma) to separate multiple tables.
    Topic string
    Topic name.
    TopicId string
    Topic ID.
    Database string
    database name.
    Table string
    table name; use , (comma) to separate multiple tables.
    Topic string
    Topic name.
    TopicId string
    Topic ID.
    database String
    database name.
    table String
    table name; use , (comma) to separate multiple tables.
    topic String
    Topic name.
    topicId String
    Topic ID.
    database string
    database name.
    table string
    table name; use , (comma) to separate multiple tables.
    topic string
    Topic name.
    topicId string
    Topic ID.
    database str
    database name.
    table str
    table name; use , (comma) to separate multiple tables.
    topic str
    Topic name.
    topic_id str
    Topic ID.
    database String
    database name.
    table String
    table name; use , (comma) to separate multiple tables.
    topic String
    Topic name.
    topicId String
    Topic ID.

    CkafkaDatahubTaskTargetResourceMariaDbParam, CkafkaDatahubTaskTargetResourceMariaDbParamArgs

    Database string
    MariaDB database name, * for all database.
    Resource string
    MariaDB connection Id.
    Table string
    MariaDB table name; * matches all non-system tables in the monitored databases. Use , to monitor multiple tables, and fill in each table in the format database name.table name.
    IncludeContentChanges string
    If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
    IncludeQuery bool
    If the value is true, and the MySQL configuration item binlog_rows_query_log_events is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
    IsTablePrefix bool
    When the Table input is a prefix, the value of this item is true, otherwise it is false.
    KeyColumns string
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma); tables not specified default to the table's primary key.
    OutputFormat string
    output format, DEFAULT, CANAL_1, CANAL_2.
    RecordWithSchema bool
    If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
    SnapshotMode string
    schema_only|initial, default initial.
    Database string
    MariaDB database name, * for all database.
    Resource string
    MariaDB connection Id.
    Table string
    MariaDB table name; * matches all non-system tables in the monitored databases. Use , to monitor multiple tables, and fill in each table in the format database name.table name.
    IncludeContentChanges string
    If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
    IncludeQuery bool
    If the value is true, and the MySQL configuration item binlog_rows_query_log_events is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
    IsTablePrefix bool
    When the Table input is a prefix, the value of this item is true, otherwise it is false.
    KeyColumns string
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma); tables not specified default to the table's primary key.
    OutputFormat string
    output format, DEFAULT, CANAL_1, CANAL_2.
    RecordWithSchema bool
    If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
    SnapshotMode string
    schema_only|initial, default initial.
    database String
    MariaDB database name, * for all database.
    resource String
    MariaDB connection Id.
    table String
    MariaDB table name; * matches all non-system tables in the monitored databases. Use , to monitor multiple tables, and fill in each table in the format database name.table name.
    includeContentChanges String
    If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
    includeQuery Boolean
    If the value is true, and the MySQL configuration item binlog_rows_query_log_events is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
    isTablePrefix Boolean
    When the Table input is a prefix, the value of this item is true, otherwise it is false.
    keyColumns String
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma); tables not specified default to the table's primary key.
    outputFormat String
    output format, DEFAULT, CANAL_1, CANAL_2.
    recordWithSchema Boolean
    If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
    snapshotMode String
    schema_only|initial, default initial.
    database string
    MariaDB database name, * for all database.
    resource string
    MariaDB connection Id.
    table string
    MariaDB table name; * matches all non-system tables in the monitored databases. Use , to monitor multiple tables, and fill in each table in the format database name.table name.
    includeContentChanges string
    If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
    includeQuery boolean
    If the value is true, and the MySQL configuration item binlog_rows_query_log_events is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
    isTablePrefix boolean
    When the Table input is a prefix, the value of this item is true, otherwise it is false.
    keyColumns string
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma); tables not specified default to the table's primary key.
    outputFormat string
    output format, DEFAULT, CANAL_1, CANAL_2.
    recordWithSchema boolean
    If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
    snapshotMode string
    schema_only|initial, default initial.
    database str
    MariaDB database name, * for all database.
    resource str
    MariaDB connection Id.
    table str
    MariaDB table name; * matches all non-system tables in the monitored databases. Use , to monitor multiple tables, and fill in each table in the format database name.table name.
    include_content_changes str
    If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
    include_query bool
    If the value is true, and the MySQL configuration item binlog_rows_query_log_events is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
    is_table_prefix bool
    When the Table input is a prefix, the value of this item is true, otherwise it is false.
    key_columns str
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma); tables not specified default to the table's primary key.
    output_format str
    output format, DEFAULT, CANAL_1, CANAL_2.
    record_with_schema bool
    If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
    snapshot_mode str
    schema_only|initial, default initial.
    database String
    MariaDB database name, * for all database.
    resource String
    MariaDB connection Id.
    table String
    MariaDB table name; * matches all non-system tables in the monitored databases. Use , to monitor multiple tables, and fill in each table in the format database name.table name.
    includeContentChanges String
    If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
    includeQuery Boolean
    If the value is true, and the MySQL configuration item binlog_rows_query_log_events is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
    isTablePrefix Boolean
    When the Table input is a prefix, the value of this item is true, otherwise it is false.
    keyColumns String
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma); tables not specified default to the table's primary key.
    outputFormat String
    output format, DEFAULT, CANAL_1, CANAL_2.
    recordWithSchema Boolean
    If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
    snapshotMode String
    schema_only|initial, default initial.
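
    As a hedged illustration of how the MariaDB settings above fit into a task definition, here is a minimal TypeScript sketch; the connection ID, database, and table are placeholders, and the MARIADB type string is an assumption based on the property listing above. The same mariaDbParam shape appears under both the source and target resource arguments.

    import * as pulumi from "@pulumi/pulumi";
    import * as tencentcloud from "@pulumi/tencentcloud";

    // Hypothetical MariaDB-to-topic inflow task; replace the resource IDs with real ones.
    const mariaDbTask = new tencentcloud.CkafkaDatahubTask("mariaDbTask", {
        taskName: "mariadb-example",
        taskType: "SOURCE",
        sourceResource: {
            type: "MARIADB",
            mariaDbParam: {
                database: "appdb",
                table: "appdb.orders", // database name.table name format
                resource: "resource-xxxxxxxx", // MariaDB connection Id (placeholder)
                snapshotMode: "initial", // schema_only or initial
                outputFormat: "DEFAULT",
            },
        },
        targetResource: {
            type: "TOPIC",
            topicParam: {
                resource: "example-topic",
                compressionType: "none",
                useAutoCreateTopic: false,
            },
        },
    });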

    CkafkaDatahubTaskTargetResourceMongoDbParam, CkafkaDatahubTaskTargetResourceMongoDbParamArgs

    Collection string
    MongoDB collection.
    CopyExisting bool
    Whether to copy existing (stock) data; the default is true.
    Database string
    MongoDB database name.
    Resource string
    Resource ID.
    Ip string
    MongoDB connection IP.
    ListeningEvent string
    Listening event type; if empty, all events are selected. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, and rename; multiple types are separated by commas.
    Password string
    MongoDB database password.
    Pipeline string
    Aggregation pipeline.
    Port double
    MongoDB connection port.
    ReadPreference string
    Master-slave read preference; the default is the master node.
    SelfBuilt bool
    Whether it is a self-built cluster.
    UserName string
    MongoDB database user name.
    Collection string
    MongoDB collection.
    CopyExisting bool
    Whether to copy existing (stock) data; the default is true.
    Database string
    MongoDB database name.
    Resource string
    Resource ID.
    Ip string
    MongoDB connection IP.
    ListeningEvent string
    Listening event type; if empty, all events are selected. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, and rename; multiple types are separated by commas.
    Password string
    MongoDB database password.
    Pipeline string
    Aggregation pipeline.
    Port float64
    MongoDB connection port.
    ReadPreference string
    Master-slave read preference; the default is the master node.
    SelfBuilt bool
    Whether it is a self-built cluster.
    UserName string
    MongoDB database user name.
    collection String
    MongoDB collection.
    copyExisting Boolean
    Whether to copy existing (stock) data; the default is true.
    database String
    MongoDB database name.
    resource String
    Resource ID.
    ip String
    MongoDB connection IP.
    listeningEvent String
    Listening event type; if empty, all events are selected. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, and rename; multiple types are separated by commas.
    password String
    MongoDB database password.
    pipeline String
    Aggregation pipeline.
    port Double
    MongoDB connection port.
    readPreference String
    Master-slave read preference; the default is the master node.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    userName String
    MongoDB database user name.
    collection string
    MongoDB collection.
    copyExisting boolean
    Whether to copy existing (stock) data; the default is true.
    database string
    MongoDB database name.
    resource string
    Resource ID.
    ip string
    MongoDB connection IP.
    listeningEvent string
    Listening event type; if empty, all events are selected. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, and rename; multiple types are separated by commas.
    password string
    MongoDB database password.
    pipeline string
    Aggregation pipeline.
    port number
    MongoDB connection port.
    readPreference string
    Master-slave read preference; the default is the master node.
    selfBuilt boolean
    Whether it is a self-built cluster.
    userName string
    MongoDB database user name.
    collection str
    MongoDB collection.
    copy_existing bool
    Whether to copy existing (stock) data; the default is true.
    database str
    MongoDB database name.
    resource str
    Resource ID.
    ip str
    MongoDB connection IP.
    listening_event str
    Listening event type; if empty, all events are selected. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, and rename; multiple types are separated by commas.
    password str
    MongoDB database password.
    pipeline str
    Aggregation pipeline.
    port float
    MongoDB connection port.
    read_preference str
    Master-slave read preference; the default is the master node.
    self_built bool
    Whether it is a self-built cluster.
    user_name str
    MongoDB database user name.
    collection String
    MongoDB collection.
    copyExisting Boolean
    Whether to copy existing (stock) data; the default is true.
    database String
    MongoDB database name.
    resource String
    Resource ID.
    ip String
    MongoDB connection IP.
    listeningEvent String
    Listening event type; if empty, all events are selected. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, and rename; multiple types are separated by commas.
    password String
    MongoDB database password.
    pipeline String
    Aggregation pipeline.
    port Number
    MongoDB connection port.
    readPreference String
    Master-slave read preference; the default is the master node.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    userName String
    MongoDB database user name.
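
    A minimal sketch of a topic-to-MongoDB outflow using the fields above; connection details are placeholders, the MONGODB type string and SINK task type are assumptions, and the password is read from Pulumi config rather than hard-coded.

    import * as pulumi from "@pulumi/pulumi";
    import * as tencentcloud from "@pulumi/tencentcloud";

    const config = new pulumi.Config();

    // Hypothetical SINK task draining a topic into a MongoDB collection.
    const mongoSink = new tencentcloud.CkafkaDatahubTask("mongoSink", {
        taskName: "topic-to-mongo",
        taskType: "SINK",
        sourceResource: {
            type: "TOPIC",
            topicParam: {
                resource: "example-topic",
                compressionType: "none",
            },
        },
        targetResource: {
            type: "MONGODB",
            mongoDbParam: {
                resource: "resource-xxxxxxxx", // MongoDB connection Id (placeholder)
                database: "appdb",
                collection: "events",
                copyExisting: true, // also copy existing (stock) data
                selfBuilt: false,
                userName: "mongo_user",
                password: config.requireSecret("mongoPassword"),
            },
        },
    });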

    CkafkaDatahubTaskTargetResourceMySqlParam, CkafkaDatahubTaskTargetResourceMySqlParamArgs

    Database string
    MySQL database name; * means the whole database.
    Resource string
    MySQL connection Id.
    Table string
    The name of the MySQL data table. * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with , (comma); each entry must be written in the format database name.table name. When a regular expression is needed, the format is also database name.table name.
    DataSourceIncrementColumn string
    The name of the column to be monitored.
    DataSourceIncrementMode string
    TIMESTAMP indicates that the incremental column is of timestamp type; INCREMENT indicates that the incremental column is of self-incrementing id type.
    DataSourceMonitorMode string
    TABLE indicates that the read item is a table; QUERY indicates that the read item is a query.
    DataSourceMonitorResource string
    When DataMonitorMode=TABLE, pass in the table that needs to be read; when DataMonitorMode=QUERY, pass in the query SQL statement that needs to be read.
    DataSourceStartFrom string
    HEAD means copying stock plus incremental data; TAIL means copying only incremental data.
    DataTargetInsertMode string
    INSERT means inserting using Insert mode; UPSERT means inserting using Upsert mode.
    DataTargetPrimaryKeyField string
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    DataTargetRecordMappings List<CkafkaDatahubTaskTargetResourceMySqlParamDataTargetRecordMapping>
    Mapping relationship between tables and messages.
    DdlTopic string
    The topic that stores the MySQL DDL information; if empty, the information is not stored by default.
    DropCls CkafkaDatahubTaskTargetResourceMySqlParamDropCls
    When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter does not take effect.
    DropInvalidMessage bool
    Whether to discard messages that fail to parse; the default is true.
    IncludeContentChanges string
    If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written to the selected topic.
    IncludeQuery bool
    If the value is true and the MySQL configuration item binlog_rows_query_log_events is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
    IsTablePrefix bool
    When the Table input is a prefix, the value of this item is true; otherwise it is false.
    IsTableRegular bool
    Whether the input table is a regular expression. If both this option and IsTablePrefix are true, this option takes precedence over IsTablePrefix.
    KeyColumns string
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
    OutputFormat string
    Output format: DEFAULT, CANAL_1, or CANAL_2.
    RecordWithSchema bool
    If the value is true, the message carries the schema corresponding to the message structure; if the value is false, it does not.
    SignalDatabase string
    Database name of the signal table.
    SnapshotMode string
    Whether to copy stock data: schema_only does not copy it, initial copies it in full; the default is initial.
    TopicRegex string
    Regular expression for routing events to specific topics, defaults to (.*).
    TopicReplacement string
    Replacement template used together with TopicRegex; capture groups such as $1 and $2 may be referenced.
    Database string
    MySQL database name; * means the whole database.
    Resource string
    MySQL connection Id.
    Table string
    The name of the MySQL data table. * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with , (comma); each entry must be written in the format database name.table name. When a regular expression is needed, the format is also database name.table name.
    DataSourceIncrementColumn string
    The name of the column to be monitored.
    DataSourceIncrementMode string
    TIMESTAMP indicates that the incremental column is of timestamp type; INCREMENT indicates that the incremental column is of self-incrementing id type.
    DataSourceMonitorMode string
    TABLE indicates that the read item is a table; QUERY indicates that the read item is a query.
    DataSourceMonitorResource string
    When DataMonitorMode=TABLE, pass in the table that needs to be read; when DataMonitorMode=QUERY, pass in the query SQL statement that needs to be read.
    DataSourceStartFrom string
    HEAD means copying stock plus incremental data; TAIL means copying only incremental data.
    DataTargetInsertMode string
    INSERT means inserting using Insert mode; UPSERT means inserting using Upsert mode.
    DataTargetPrimaryKeyField string
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    DataTargetRecordMappings []CkafkaDatahubTaskTargetResourceMySqlParamDataTargetRecordMapping
    Mapping relationship between tables and messages.
    DdlTopic string
    The topic that stores the MySQL DDL information; if empty, the information is not stored by default.
    DropCls CkafkaDatahubTaskTargetResourceMySqlParamDropCls
    When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter does not take effect.
    DropInvalidMessage bool
    Whether to discard messages that fail to parse; the default is true.
    IncludeContentChanges string
    If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written to the selected topic.
    IncludeQuery bool
    If the value is true and the MySQL configuration item binlog_rows_query_log_events is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
    IsTablePrefix bool
    When the Table input is a prefix, the value of this item is true; otherwise it is false.
    IsTableRegular bool
    Whether the input table is a regular expression. If both this option and IsTablePrefix are true, this option takes precedence over IsTablePrefix.
    KeyColumns string
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
    OutputFormat string
    Output format: DEFAULT, CANAL_1, or CANAL_2.
    RecordWithSchema bool
    If the value is true, the message carries the schema corresponding to the message structure; if the value is false, it does not.
    SignalDatabase string
    Database name of the signal table.
    SnapshotMode string
    Whether to copy stock data: schema_only does not copy it, initial copies it in full; the default is initial.
    TopicRegex string
    Regular expression for routing events to specific topics, defaults to (.*).
    TopicReplacement string
    Replacement template used together with TopicRegex; capture groups such as $1 and $2 may be referenced.
    database String
    MySQL database name; * means the whole database.
    resource String
    MySQL connection Id.
    table String
    The name of the MySQL data table. * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with , (comma); each entry must be written in the format database name.table name. When a regular expression is needed, the format is also database name.table name.
    dataSourceIncrementColumn String
    The name of the column to be monitored.
    dataSourceIncrementMode String
    TIMESTAMP indicates that the incremental column is of timestamp type; INCREMENT indicates that the incremental column is of self-incrementing id type.
    dataSourceMonitorMode String
    TABLE indicates that the read item is a table; QUERY indicates that the read item is a query.
    dataSourceMonitorResource String
    When DataMonitorMode=TABLE, pass in the table that needs to be read; when DataMonitorMode=QUERY, pass in the query SQL statement that needs to be read.
    dataSourceStartFrom String
    HEAD means copying stock plus incremental data; TAIL means copying only incremental data.
    dataTargetInsertMode String
    INSERT means inserting using Insert mode; UPSERT means inserting using Upsert mode.
    dataTargetPrimaryKeyField String
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    dataTargetRecordMappings List<CkafkaDatahubTaskTargetResourceMySqlParamDataTargetRecordMapping>
    Mapping relationship between tables and messages.
    ddlTopic String
    The topic that stores the MySQL DDL information; if empty, the information is not stored by default.
    dropCls CkafkaDatahubTaskTargetResourceMySqlParamDropCls
    When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter does not take effect.
    dropInvalidMessage Boolean
    Whether to discard messages that fail to parse; the default is true.
    includeContentChanges String
    If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written to the selected topic.
    includeQuery Boolean
    If the value is true and the MySQL configuration item binlog_rows_query_log_events is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
    isTablePrefix Boolean
    When the Table input is a prefix, the value of this item is true; otherwise it is false.
    isTableRegular Boolean
    Whether the input table is a regular expression. If both this option and IsTablePrefix are true, this option takes precedence over IsTablePrefix.
    keyColumns String
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
    outputFormat String
    Output format: DEFAULT, CANAL_1, or CANAL_2.
    recordWithSchema Boolean
    If the value is true, the message carries the schema corresponding to the message structure; if the value is false, it does not.
    signalDatabase String
    Database name of the signal table.
    snapshotMode String
    Whether to copy stock data: schema_only does not copy it, initial copies it in full; the default is initial.
    topicRegex String
    Regular expression for routing events to specific topics, defaults to (.*).
    topicReplacement String
    Replacement template used together with TopicRegex; capture groups such as $1 and $2 may be referenced.
    database string
    MySQL database name; * means the whole database.
    resource string
    MySQL connection Id.
    table string
    The name of the MySQL data table. * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with , (comma); each entry must be written in the format database name.table name. When a regular expression is needed, the format is also database name.table name.
    dataSourceIncrementColumn string
    The name of the column to be monitored.
    dataSourceIncrementMode string
    TIMESTAMP indicates that the incremental column is of timestamp type; INCREMENT indicates that the incremental column is of self-incrementing id type.
    dataSourceMonitorMode string
    TABLE indicates that the read item is a table; QUERY indicates that the read item is a query.
    dataSourceMonitorResource string
    When DataMonitorMode=TABLE, pass in the table that needs to be read; when DataMonitorMode=QUERY, pass in the query SQL statement that needs to be read.
    dataSourceStartFrom string
    HEAD means copying stock plus incremental data; TAIL means copying only incremental data.
    dataTargetInsertMode string
    INSERT means inserting using Insert mode; UPSERT means inserting using Upsert mode.
    dataTargetPrimaryKeyField string
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    dataTargetRecordMappings CkafkaDatahubTaskTargetResourceMySqlParamDataTargetRecordMapping[]
    Mapping relationship between tables and messages.
    ddlTopic string
    The topic that stores the MySQL DDL information; if empty, the information is not stored by default.
    dropCls CkafkaDatahubTaskTargetResourceMySqlParamDropCls
    When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter does not take effect.
    dropInvalidMessage boolean
    Whether to discard messages that fail to parse; the default is true.
    includeContentChanges string
    If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written to the selected topic.
    includeQuery boolean
    If the value is true and the MySQL configuration item binlog_rows_query_log_events is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
    isTablePrefix boolean
    When the Table input is a prefix, the value of this item is true; otherwise it is false.
    isTableRegular boolean
    Whether the input table is a regular expression. If both this option and IsTablePrefix are true, this option takes precedence over IsTablePrefix.
    keyColumns string
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
    outputFormat string
    Output format: DEFAULT, CANAL_1, or CANAL_2.
    recordWithSchema boolean
    If the value is true, the message carries the schema corresponding to the message structure; if the value is false, it does not.
    signalDatabase string
    Database name of the signal table.
    snapshotMode string
    Whether to copy stock data: schema_only does not copy it, initial copies it in full; the default is initial.
    topicRegex string
    Regular expression for routing events to specific topics, defaults to (.*).
    topicReplacement string
    Replacement template used together with TopicRegex; capture groups such as $1 and $2 may be referenced.
    database str
    MySQL database name; * means the whole database.
    resource str
    MySQL connection Id.
    table str
    The name of the MySQL data table. * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with , (comma); each entry must be written in the format database name.table name. When a regular expression is needed, the format is also database name.table name.
    data_source_increment_column str
    The name of the column to be monitored.
    data_source_increment_mode str
    TIMESTAMP indicates that the incremental column is of timestamp type; INCREMENT indicates that the incremental column is of self-incrementing id type.
    data_source_monitor_mode str
    TABLE indicates that the read item is a table; QUERY indicates that the read item is a query.
    data_source_monitor_resource str
    When DataMonitorMode=TABLE, pass in the table that needs to be read; when DataMonitorMode=QUERY, pass in the query SQL statement that needs to be read.
    data_source_start_from str
    HEAD means copying stock plus incremental data; TAIL means copying only incremental data.
    data_target_insert_mode str
    INSERT means inserting using Insert mode; UPSERT means inserting using Upsert mode.
    data_target_primary_key_field str
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    data_target_record_mappings Sequence[CkafkaDatahubTaskTargetResourceMySqlParamDataTargetRecordMapping]
    Mapping relationship between tables and messages.
    ddl_topic str
    The topic that stores the MySQL DDL information; if empty, the information is not stored by default.
    drop_cls CkafkaDatahubTaskTargetResourceMySqlParamDropCls
    When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter does not take effect.
    drop_invalid_message bool
    Whether to discard messages that fail to parse; the default is true.
    include_content_changes str
    If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written to the selected topic.
    include_query bool
    If the value is true and the MySQL configuration item binlog_rows_query_log_events is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
    is_table_prefix bool
    When the Table input is a prefix, the value of this item is true; otherwise it is false.
    is_table_regular bool
    Whether the input table is a regular expression. If both this option and IsTablePrefix are true, this option takes precedence over IsTablePrefix.
    key_columns str
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
    output_format str
    Output format: DEFAULT, CANAL_1, or CANAL_2.
    record_with_schema bool
    If the value is true, the message carries the schema corresponding to the message structure; if the value is false, it does not.
    signal_database str
    Database name of the signal table.
    snapshot_mode str
    Whether to copy stock data: schema_only does not copy it, initial copies it in full; the default is initial.
    topic_regex str
    Regular expression for routing events to specific topics, defaults to (.*).
    topic_replacement str
    Replacement template used together with TopicRegex; capture groups such as $1 and $2 may be referenced.
    database String
    MySQL database name; * means the whole database.
    resource String
    MySQL connection Id.
    table String
    The name of the MySQL data table. * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with , (comma); each entry must be written in the format database name.table name. When a regular expression is needed, the format is also database name.table name.
    dataSourceIncrementColumn String
    The name of the column to be monitored.
    dataSourceIncrementMode String
    TIMESTAMP indicates that the incremental column is of timestamp type; INCREMENT indicates that the incremental column is of self-incrementing id type.
    dataSourceMonitorMode String
    TABLE indicates that the read item is a table; QUERY indicates that the read item is a query.
    dataSourceMonitorResource String
    When DataMonitorMode=TABLE, pass in the table that needs to be read; when DataMonitorMode=QUERY, pass in the query SQL statement that needs to be read.
    dataSourceStartFrom String
    HEAD means copying stock plus incremental data; TAIL means copying only incremental data.
    dataTargetInsertMode String
    INSERT means inserting using Insert mode; UPSERT means inserting using Upsert mode.
    dataTargetPrimaryKeyField String
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    dataTargetRecordMappings List<Property Map>
    Mapping relationship between tables and messages.
    ddlTopic String
    The topic that stores the MySQL DDL information; if empty, the information is not stored by default.
    dropCls Property Map
    When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter does not take effect.
    dropInvalidMessage Boolean
    Whether to discard messages that fail to parse; the default is true.
    includeContentChanges String
    If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written to the selected topic.
    includeQuery Boolean
    If the value is true and the MySQL configuration item binlog_rows_query_log_events is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
    isTablePrefix Boolean
    When the Table input is a prefix, the value of this item is true; otherwise it is false.
    isTableRegular Boolean
    Whether the input table is a regular expression. If both this option and IsTablePrefix are true, this option takes precedence over IsTablePrefix.
    keyColumns String
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
    outputFormat String
    Output format: DEFAULT, CANAL_1, or CANAL_2.
    recordWithSchema Boolean
    If the value is true, the message carries the schema corresponding to the message structure; if the value is false, it does not.
    signalDatabase String
    Database name of the signal table.
    snapshotMode String
    Whether to copy stock data: schema_only does not copy it, initial copies it in full; the default is initial.
    topicRegex String
    Regular expression for routing events to specific topics, defaults to (.*).
    topicReplacement String
    Replacement template used together with TopicRegex; capture groups such as $1 and $2 may be referenced.
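
    The upsert-related fields above interact: dataTargetInsertMode selects the write mode, and dataTargetPrimaryKeyField only matters when it is UPSERT. A hedged TypeScript fragment, with placeholder IDs and an assumed MYSQL type string:

    // Hypothetical targetResource fragment for a MySQL sink using Upsert mode.
    const mySqlTarget = {
        type: "MYSQL",
        mySqlParam: {
            resource: "resource-xxxxxxxx", // MySQL connection Id (placeholder)
            database: "appdb",
            table: "appdb.orders", // database name.table name format
            dataTargetInsertMode: "UPSERT",
            dataTargetPrimaryKeyField: "order_id", // consulted only in UPSERT mode
            dropInvalidMessage: true, // discard rows that fail to parse
        },
    };

    The fragment plugs into the targetResource argument of tencentcloud.CkafkaDatahubTask, as in the sketches above.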

    CkafkaDatahubTaskTargetResourceMySqlParamDataTargetRecordMapping, CkafkaDatahubTaskTargetResourceMySqlParamDataTargetRecordMappingArgs

    AllowNull bool
    Whether the message is allowed to be empty.
    AutoIncrement bool
    Whether it is an auto-increment column.
    ColumnName string
    Column Name.
    ColumnSize string
    Current column size.
    DecimalDigits string
    Current column decimal digits.
    DefaultValue string
    Database table default parameters.
    ExtraInfo string
    Database table extra fields.
    JsonKey string
    The key name of the message.
    Type string
    Message type.
    AllowNull bool
    Whether the message is allowed to be empty.
    AutoIncrement bool
    Whether it is an auto-increment column.
    ColumnName string
    Column Name.
    ColumnSize string
    Current column size.
    DecimalDigits string
    Current column decimal digits.
    DefaultValue string
    Database table default parameters.
    ExtraInfo string
    Database table extra fields.
    JsonKey string
    The key name of the message.
    Type string
    Message type.
    allowNull Boolean
    Whether the message is allowed to be empty.
    autoIncrement Boolean
    Whether it is an auto-increment column.
    columnName String
    Column Name.
    columnSize String
    Current column size.
    decimalDigits String
    Current column decimal digits.
    defaultValue String
    Database table default parameters.
    extraInfo String
    Database table extra fields.
    jsonKey String
    The key name of the message.
    type String
    Message type.
    allowNull boolean
    Whether the message is allowed to be empty.
    autoIncrement boolean
    Whether it is an auto-increment column.
    columnName string
    Column Name.
    columnSize string
    Current column size.
    decimalDigits string
    Current column decimal digits.
    defaultValue string
    Database table default parameters.
    extraInfo string
    Database table extra fields.
    jsonKey string
    The key name of the message.
    type string
    Message type.
    allow_null bool
    Whether the message is allowed to be empty.
    auto_increment bool
    Whether it is an auto-increment column.
    column_name str
    Column Name.
    column_size str
    Current column size.
    decimal_digits str
    Current column decimal digits.
    default_value str
    Database table default parameters.
    extra_info str
    Database table extra fields.
    json_key str
    The key name of the message.
    type str
    Message type.
    allowNull Boolean
    Whether the message is allowed to be empty.
    autoIncrement Boolean
    Whether it is an auto-increment column.
    columnName String
    Column Name.
    columnSize String
    Current column size.
    decimalDigits String
    Current column decimal digits.
    defaultValue String
    Database table default parameters.
    extraInfo String
    Database table extra fields.
    jsonKey String
    The key name of the message.
    type String
    Message type.
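
    Each record mapping ties one message key to one destination column. A hedged sketch of two entries for the mySqlParam block above; every column name, size, and type value here is invented for illustration.

    // Hypothetical dataTargetRecordMappings entries (values are illustrative only).
    const recordMappings = [
        {
            jsonKey: "order_id",    // key name in the message
            columnName: "order_id", // destination column
            type: "int",
            columnSize: "11",
            allowNull: false,
            autoIncrement: false,
        },
        {
            jsonKey: "amount",
            columnName: "amount",
            type: "float",
            columnSize: "10",
            decimalDigits: "2",
            allowNull: true,
            autoIncrement: false,
        },
    ];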

    CkafkaDatahubTaskTargetResourceMySqlParamDropCls, CkafkaDatahubTaskTargetResourceMySqlParamDropClsArgs

    DropClsLogSet string
    CLS logset ID.
    DropClsOwneruin string
    CLS account owner uin.
    DropClsRegion string
    The region to which CLS is delivered.
    DropClsTopicId string
    CLS topic ID.
    DropInvalidMessageToCls bool
    Whether to deliver to CLS.
    DropClsLogSet string
    CLS logset ID.
    DropClsOwneruin string
    CLS account owner uin.
    DropClsRegion string
    The region to which CLS is delivered.
    DropClsTopicId string
    CLS topic ID.
    DropInvalidMessageToCls bool
    Whether to deliver to CLS.
    dropClsLogSet String
    CLS logset ID.
    dropClsOwneruin String
    CLS account owner uin.
    dropClsRegion String
    The region to which CLS is delivered.
    dropClsTopicId String
    CLS topic ID.
    dropInvalidMessageToCls Boolean
    Whether to deliver to CLS.
    dropClsLogSet string
    CLS logset ID.
    dropClsOwneruin string
    CLS account owner uin.
    dropClsRegion string
    The region to which CLS is delivered.
    dropClsTopicId string
    CLS topic ID.
    dropInvalidMessageToCls boolean
    Whether to deliver to CLS.
    drop_cls_log_set str
    CLS logset ID.
    drop_cls_owneruin str
    CLS account owner uin.
    drop_cls_region str
    The region to which CLS is delivered.
    drop_cls_topic_id str
    CLS topic ID.
    drop_invalid_message_to_cls bool
    Whether to deliver to CLS.
    dropClsLogSet String
    CLS logset ID.
    dropClsOwneruin String
    CLS account owner uin.
    dropClsRegion String
    The region to which CLS is delivered.
    dropClsTopicId String
    CLS topic ID.
    dropInvalidMessageToCls Boolean
    Whether to deliver to CLS.
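
    Since dropCls only takes effect when dropInvalidMessageToCls is true (which in turn disables dropInvalidMessage), a hedged fragment delivering unparseable messages to CLS; all IDs and the region are placeholders.

    // Hypothetical dropCls fragment; the logset and topic must be existing CLS resources.
    const dropCls = {
        dropInvalidMessageToCls: true, // deliver bad messages to CLS instead of dropping them
        dropClsRegion: "ap-guangzhou",
        dropClsOwneruin: "100000000001", // account owner uin (placeholder)
        dropClsLogSet: "logset-xxxxxxxx",
        dropClsTopicId: "topic-xxxxxxxx",
    };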

    CkafkaDatahubTaskTargetResourcePostgreSqlParam, CkafkaDatahubTaskTargetResourcePostgreSqlParamArgs

    Database string
    PostgreSQL database name.
    PluginName string
    Plugin name (decoderbufs or pgoutput); the default is decoderbufs.
    Resource string
    PostgreSQL connection Id.
    Table string
    PostgreSQL table name. * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with , (comma); each entry must be written in the format schema name.table name. When a regular expression is needed, the format is also schema name.table name.
    DataFormat string
    Upstream data format (JSON or Debezium); required when the database synchronization mode matches the default field.
    DataTargetInsertMode string
    INSERT means inserting using Insert mode; UPSERT means inserting using Upsert mode.
    DataTargetPrimaryKeyField string
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    DataTargetRecordMappings List<CkafkaDatahubTaskTargetResourcePostgreSqlParamDataTargetRecordMapping>
    Mapping relationship between tables and messages.
    DropInvalidMessage bool
    Whether to discard messages that fail to parse; the default is true.
    IsTableRegular bool
    Whether the input table is a regular expression.
    KeyColumns string
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
    RecordWithSchema bool
    If the value is true, the message carries the schema corresponding to the message structure; if the value is false, it does not.
    SnapshotMode string
    never or initial; the default is initial.
    Database string
    PostgreSQL database name.
    PluginName string
    Plugin name (decoderbufs or pgoutput); the default is decoderbufs.
    Resource string
    PostgreSQL connection Id.
    Table string
    PostgreSQL table name. * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with , (comma); each entry must be written in the format schema name.table name. When a regular expression is needed, the format is also schema name.table name.
    DataFormat string
    Upstream data format (JSON or Debezium); required when the database synchronization mode matches the default field.
    DataTargetInsertMode string
    INSERT means inserting using Insert mode; UPSERT means inserting using Upsert mode.
    DataTargetPrimaryKeyField string
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    DataTargetRecordMappings []CkafkaDatahubTaskTargetResourcePostgreSqlParamDataTargetRecordMapping
    Mapping relationship between tables and messages.
    DropInvalidMessage bool
    Whether to discard messages that fail to parse; the default is true.
    IsTableRegular bool
    Whether the input table is a regular expression.
    KeyColumns string
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
    RecordWithSchema bool
    If the value is true, the message carries the schema corresponding to the message structure; if the value is false, it does not.
    SnapshotMode string
    never or initial; the default is initial.
    database String
    PostgreSQL database name.
    pluginName String
    Plugin name (decoderbufs or pgoutput); the default is decoderbufs.
    resource String
    PostgreSQL connection Id.
    table String
    PostgreSQL table name. * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with , (comma); each entry must be written in the format schema name.table name. When a regular expression is needed, the format is also schema name.table name.
    dataFormat String
    Upstream data format (JSON or Debezium); required when the database synchronization mode matches the default field.
    dataTargetInsertMode String
    INSERT means inserting using Insert mode; UPSERT means inserting using Upsert mode.
    dataTargetPrimaryKeyField String
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    dataTargetRecordMappings List<CkafkaDatahubTaskTargetResourcePostgreSqlParamDataTargetRecordMapping>
    Mapping relationship between tables and messages.
    dropInvalidMessage Boolean
    Whether to discard messages that fail to parse; the default is true.
    isTableRegular Boolean
    Whether the input table is a regular expression.
    keyColumns String
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
    recordWithSchema Boolean
    If the value is true, the message carries the schema corresponding to the message structure; if the value is false, it does not.
    snapshotMode String
    never or initial; the default is initial.
    database string
    PostgreSQL database name.
    pluginName string
    Plugin name (decoderbufs or pgoutput); the default is decoderbufs.
    resource string
    PostgreSQL connection Id.
    table string
    PostgreSQL table name. * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with , (comma); each entry must be written in the format schema name.table name. When a regular expression is needed, the format is also schema name.table name.
    dataFormat string
    Upstream data format (JSON or Debezium); required when the database synchronization mode matches the default field.
    dataTargetInsertMode string
    INSERT means inserting using Insert mode; UPSERT means inserting using Upsert mode.
    dataTargetPrimaryKeyField string
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    dataTargetRecordMappings CkafkaDatahubTaskTargetResourcePostgreSqlParamDataTargetRecordMapping[]
    Mapping relationship between tables and messages.
    dropInvalidMessage boolean
    Whether to discard messages that fail to parse; the default is true.
    isTableRegular boolean
    Whether the input table is a regular expression.
    keyColumns string
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
    recordWithSchema boolean
    If the value is true, the message carries the schema corresponding to the message structure; if the value is false, it does not.
    snapshotMode string
    never or initial; the default is initial.
    database str
    PostgreSQL database name.
    plugin_name str
    Plugin name (decoderbufs or pgoutput); the default is decoderbufs.
    resource str
    PostgreSQL connection Id.
    table str
    PostgreSQL table name. * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with , (comma); each entry must be written in the format schema name.table name. When a regular expression is needed, the format is also schema name.table name.
    data_format str
    Upstream data format (JSON or Debezium); required when the database synchronization mode matches the default field.
    data_target_insert_mode str
    INSERT means inserting using Insert mode; UPSERT means inserting using Upsert mode.
    data_target_primary_key_field str
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    data_target_record_mappings Sequence[CkafkaDatahubTaskTargetResourcePostgreSqlParamDataTargetRecordMapping]
    Mapping relationship between tables and messages.
    drop_invalid_message bool
    Whether to discard messages that fail to parse; the default is true.
    is_table_regular bool
    Whether the input table is a regular expression.
    key_columns str
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
    record_with_schema bool
    If the value is true, the message carries the schema corresponding to the message structure; if the value is false, it does not.
    snapshot_mode str
    never or initial; the default is initial.
    database String
    PostgreSQL database name.
    pluginName String
    Plugin name (decoderbufs or pgoutput); the default is decoderbufs.
    resource String
    PostgreSQL connection Id.
    table String
    PostgreSQL table name. * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with , (comma); each entry must be written in the format schema name.table name. When a regular expression is needed, the format is also schema name.table name.
    dataFormat String
    Upstream data format (JSON or Debezium); required when the database synchronization mode matches the default field.
    dataTargetInsertMode String
    INSERT means inserting using Insert mode; UPSERT means inserting using Upsert mode.
    dataTargetPrimaryKeyField String
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    dataTargetRecordMappings List<Property Map>
    Mapping relationship between tables and messages.
    dropInvalidMessage Boolean
    Whether to discard messages that fail to parse; the default is true.
    isTableRegular Boolean
    Whether the input table is a regular expression.
    keyColumns String
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
    recordWithSchema Boolean
    If the value is true, the message carries the schema corresponding to the message structure; if the value is false, it does not.
    snapshotMode String
    never or initial; the default is initial.
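
    A hedged fragment for a PostgreSQL target, assuming the pgoutput plugin, Upsert mode, and a POSTGRESQL type string; the connection ID and table are placeholders.

    // Hypothetical targetResource fragment for a PostgreSQL sink.
    const postgresTarget = {
        type: "POSTGRESQL",
        postgreSqlParam: {
            resource: "resource-xxxxxxxx", // PostgreSQL connection Id (placeholder)
            database: "appdb",
            table: "public.orders", // schema name.table name format
            pluginName: "pgoutput", // decoderbufs or pgoutput
            dataFormat: "JSON",
            dataTargetInsertMode: "UPSERT",
            dataTargetPrimaryKeyField: "order_id",
            keyColumns: "public.orders:order_id",
        },
    };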

    CkafkaDatahubTaskTargetResourcePostgreSqlParamDataTargetRecordMapping, CkafkaDatahubTaskTargetResourcePostgreSqlParamDataTargetRecordMappingArgs

    AllowNull bool
    Whether the message is allowed to be empty.
    AutoIncrement bool
    Whether it is an auto-increment column.
    ColumnName string
    Column Name.
    ColumnSize string
    Current column size.
    DecimalDigits string
    Current column decimal digits.
    DefaultValue string
    Database table default parameters.
    ExtraInfo string
    Database table extra fields.
    JsonKey string
    The key name of the message.
    Type string
    Message type.
    AllowNull bool
    Whether the message is allowed to be empty.
    AutoIncrement bool
    Whether it is an auto-increment column.
    ColumnName string
    Column Name.
    ColumnSize string
    Current column size.
    DecimalDigits string
    Current column decimal digits.
    DefaultValue string
    Database table default parameters.
    ExtraInfo string
    Database table extra fields.
    JsonKey string
    The key name of the message.
    Type string
    Message type.
    allowNull Boolean
    Whether the message is allowed to be empty.
    autoIncrement Boolean
    Whether it is an auto-increment column.
    columnName String
    Column Name.
    columnSize String
    Current column size.
    decimalDigits String
    Current column decimal digits.
    defaultValue String
    Database table default parameters.
    extraInfo String
    Database table extra fields.
    jsonKey String
    The key name of the message.
    type String
    Message type.
    allowNull boolean
    Whether the message is allowed to be empty.
    autoIncrement boolean
    Whether it is an auto-increment column.
    columnName string
    Column Name.
    columnSize string
    Current column size.
    decimalDigits string
    Current column decimal digits.
    defaultValue string
    Database table default parameters.
    extraInfo string
    Database table extra fields.
    jsonKey string
    The key name of the message.
    type string
    Message type.
    allow_null bool
    Whether the message is allowed to be empty.
    auto_increment bool
    Whether it is an auto-increment column.
    column_name str
    Column Name.
    column_size str
    Current column size.
    decimal_digits str
    Current column decimal digits.
    default_value str
    Database table default parameters.
    extra_info str
    Database table extra fields.
    json_key str
    The key name of the message.
    type str
    Message type.
    allowNull Boolean
    Whether the message is allowed to be empty.
    autoIncrement Boolean
    Whether it is an auto-increment column.
    columnName String
    Column Name.
    columnSize String
    Current column size.
    decimalDigits String
    Current column decimal digits.
    defaultValue String
    Database table default parameters.
    extraInfo String
    Database table extra fields.
    jsonKey String
    The key name of the message.
    type String
    Message type.

    CkafkaDatahubTaskTargetResourceScfParam, CkafkaDatahubTaskTargetResourceScfParamArgs

    FunctionName string
    SCF function name.
    BatchSize double
    The maximum number of messages sent in each batch; the default is 1000.
    MaxRetries double
    The number of retries after the SCF call fails; the default is 5.
    Namespace string
    SCF cloud function namespace; the default is default.
    Qualifier string
    SCF cloud function version and alias; the default is DEFAULT.
    FunctionName string
    SCF function name.
    BatchSize float64
    The maximum number of messages sent in each batch; the default is 1000.
    MaxRetries float64
    The number of retries after the SCF call fails; the default is 5.
    Namespace string
    SCF cloud function namespace; the default is default.
    Qualifier string
    SCF cloud function version and alias; the default is DEFAULT.
    functionName String
    SCF function name.
    batchSize Double
    The maximum number of messages sent in each batch; the default is 1000.
    maxRetries Double
    The number of retries after the SCF call fails; the default is 5.
    namespace String
    SCF cloud function namespace; the default is default.
    qualifier String
    SCF cloud function version and alias; the default is DEFAULT.
    functionName string
    SCF function name.
    batchSize number
    The maximum number of messages sent in each batch; the default is 1000.
    maxRetries number
    The number of retries after the SCF call fails; the default is 5.
    namespace string
    SCF cloud function namespace; the default is default.
    qualifier string
    SCF cloud function version and alias; the default is DEFAULT.
    function_name str
    SCF function name.
    batch_size float
    The maximum number of messages sent in each batch; the default is 1000.
    max_retries float
    The number of retries after the SCF call fails; the default is 5.
    namespace str
    SCF cloud function namespace; the default is default.
    qualifier str
    SCF cloud function version and alias; the default is DEFAULT.
    functionName String
    SCF function name.
    batchSize Number
    The maximum number of messages sent in each batch; the default is 1000.
    maxRetries Number
    The number of retries after the SCF call fails; the default is 5.
    namespace String
    SCF cloud function namespace; the default is default.
    qualifier String
    SCF cloud function version and alias; the default is DEFAULT.
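
    A hedged fragment routing messages to an SCF function, spelling out the documented defaults; the function name is a placeholder and the SCF type string is an assumption.

    // Hypothetical targetResource fragment for an SCF sink.
    const scfTarget = {
        type: "SCF",
        scfParam: {
            functionName: "consume-kafka-events", // placeholder SCF function
            namespace: "default",
            qualifier: "DEFAULT", // version or alias
            batchSize: 1000, // max messages per batch
            maxRetries: 5,   // retries after a failed call
        },
    };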

    CkafkaDatahubTaskTargetResourceSqlServerParam, CkafkaDatahubTaskTargetResourceSqlServerParamArgs

    Database string
    SQLServer database name.
    Resource string
    SQLServer connection Id.
    Table string
    SQLServer table name. * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with , (comma); each entry must be written in the format database name.table name.
    SnapshotMode string
    schema_only or initial; the default is initial.
    Database string
    SQLServer database name.
    Resource string
    SQLServer connection Id.
    Table string
    SQLServer table name. * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with , (comma); each entry must be written in the format database name.table name.
    SnapshotMode string
    schema_only or initial; the default is initial.
    database String
    SQLServer database name.
    resource String
    SQLServer connection Id.
    table String
    SQLServer table name. * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with , (comma); each entry must be written in the format database name.table name.
    snapshotMode String
    schema_only or initial; the default is initial.
    database string
    SQLServer database name.
    resource string
    SQLServer connection Id.
    table string
    SQLServer table name. * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with , (comma); each entry must be written in the format database name.table name.
    snapshotMode string
    schema_only or initial; the default is initial.
    database str
    SQLServer database name.
    resource str
    SQLServer connection Id.
    table str
    SQLServer table name. * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with , (comma); each entry must be written in the format database name.table name.
    snapshot_mode str
    schema_only or initial; the default is initial.
    database String
    SQLServer database name.
    resource String
    SQLServer connection Id.
    table String
    SQLServer table name. * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with , (comma); each entry must be written in the format database name.table name.
    snapshotMode String
    schema_only or initial; the default is initial.
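
    A hedged SQL Server fragment; note the same database name.table name convention as the other relational params. The connection ID is a placeholder and the SQLSERVER type string is an assumption.

    // Hypothetical targetResource fragment for a SQL Server sink.
    const sqlServerTarget = {
        type: "SQLSERVER",
        sqlServerParam: {
            resource: "resource-xxxxxxxx", // SQLServer connection Id (placeholder)
            database: "appdb",
            table: "appdb.orders",
            snapshotMode: "initial", // schema_only or initial
        },
    };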

    CkafkaDatahubTaskTargetResourceTdwParam, CkafkaDatahubTaskTargetResourceTdwParamArgs

    Bid string
    Tdw bid.
    Tid string
    Tdw tid.
    IsDomestic bool
    Whether it is domestic; the default is true.
    TdwHost string
    TDW address; the default is tl-tdbank-tdmanager.tencent-distribute.com.
    TdwPort double
    TDW port, default 8099.
    Bid string
    Tdw bid.
    Tid string
    Tdw tid.
    IsDomestic bool
    Whether it is domestic; the default is true.
    TdwHost string
    TDW address; the default is tl-tdbank-tdmanager.tencent-distribute.com.
    TdwPort float64
    TDW port, default 8099.
    bid String
    Tdw bid.
    tid String
    Tdw tid.
    isDomestic Boolean
    Whether it is domestic; the default is true.
    tdwHost String
    TDW address; the default is tl-tdbank-tdmanager.tencent-distribute.com.
    tdwPort Double
    TDW port, default 8099.
    bid string
    Tdw bid.
    tid string
    Tdw tid.
    isDomestic boolean
    Whether it is domestic; the default is true.
    tdwHost string
    TDW address; the default is tl-tdbank-tdmanager.tencent-distribute.com.
    tdwPort number
    TDW port, default 8099.
    bid str
    Tdw bid.
    tid str
    Tdw tid.
    is_domestic bool
    Whether it is domestic; the default is true.
    tdw_host str
    TDW address; the default is tl-tdbank-tdmanager.tencent-distribute.com.
    tdw_port float
    TDW port, default 8099.
    bid String
    Tdw bid.
    tid String
    Tdw tid.
    isDomestic Boolean
    Whether it is domestic; the default is true.
    tdwHost String
    TDW address; the default is tl-tdbank-tdmanager.tencent-distribute.com.
    tdwPort Number
    TDW port, default 8099.
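
    A hedged TDW fragment relying on the documented host and port defaults; the bid/tid values are placeholders and the TDW type string is an assumption.

    // Hypothetical targetResource fragment for a TDW sink.
    const tdwTarget = {
        type: "TDW",
        tdwParam: {
            bid: "b_example", // Tdw bid (placeholder)
            tid: "t_example", // Tdw tid (placeholder)
            isDomestic: true,
            tdwHost: "tl-tdbank-tdmanager.tencent-distribute.com",
            tdwPort: 8099,
        },
    };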

    CkafkaDatahubTaskTargetResourceTopicParam, CkafkaDatahubTaskTargetResourceTopicParamArgs

    Resource string
    The name of the standalone (separately sold) topic.
    CompressionType string
    Whether to compress when writing to the topic: fill in none if disabled; if enabled, fill in one of gzip, snappy, or lz4.
    MsgMultiple double
    Each source topic message is amplified MsgMultiple times and written to the target topic (this parameter currently only applies to CKafka-to-CKafka tasks).
    OffsetType string
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    StartTime double
    Required when the offset type is timestamp; pass a timestamp accurate to the second.
    TopicId string
    The TopicId of the topic.
    UseAutoCreateTopic bool
    Whether the topic in use needs to be automatically created (currently only supported for SOURCE inflow tasks).
    Resource string
    The name of the standalone (separately sold) topic.
    CompressionType string
    Whether to compress when writing to the topic: fill in none if disabled; if enabled, fill in one of gzip, snappy, or lz4.
    MsgMultiple float64
    Each source topic message is amplified MsgMultiple times and written to the target topic (this parameter currently only applies to CKafka-to-CKafka tasks).
    OffsetType string
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    StartTime float64
    Required when the offset type is timestamp; pass a timestamp accurate to the second.
    TopicId string
    The TopicId of the topic.
    UseAutoCreateTopic bool
    Whether the topic in use needs to be automatically created (currently only supported for SOURCE inflow tasks).
    resource String
    The name of the standalone (separately sold) topic.
    compressionType String
    Whether to compress when writing to the topic: fill in none if disabled; if enabled, fill in one of gzip, snappy, or lz4.
    msgMultiple Double
    Each source topic message is amplified MsgMultiple times and written to the target topic (this parameter currently only applies to CKafka-to-CKafka tasks).
    offsetType String
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    startTime Double
    Required when the offset type is timestamp; pass a timestamp accurate to the second.
    topicId String
    The TopicId of the topic.
    useAutoCreateTopic Boolean
    Whether the topic in use needs to be automatically created (currently only supported for SOURCE inflow tasks).
    resource string
    The name of the standalone (separately sold) topic.
    compressionType string
    Whether to compress when writing to the topic: fill in none if disabled; if enabled, fill in one of gzip, snappy, or lz4.
    msgMultiple number
    Each source topic message is amplified MsgMultiple times and written to the target topic (this parameter currently only applies to CKafka-to-CKafka tasks).
    offsetType string
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    startTime number
    Required when the offset type is timestamp; pass a timestamp accurate to the second.
    topicId string
    The TopicId of the topic.
    useAutoCreateTopic boolean
    Whether the topic in use needs to be automatically created (currently only supported for SOURCE inflow tasks).
    resource str
    The name of the standalone (separately sold) topic.
    compression_type str
    Whether to compress when writing to the topic: fill in none if disabled; if enabled, fill in one of gzip, snappy, or lz4.
    msg_multiple float
    Each source topic message is amplified MsgMultiple times and written to the target topic (this parameter currently only applies to CKafka-to-CKafka tasks).
    offset_type str
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    start_time float
    Required when the offset type is timestamp; pass a timestamp accurate to the second.
    topic_id str
    The TopicId of the topic.
    use_auto_create_topic bool
    Whether the topic in use needs to be automatically created (currently only supported for SOURCE inflow tasks).
    resource String
    The topic name of the topic sold separately.
    compressionType String
    Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
    msgMultiple Number
    1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
    offsetType String
    Offset type, initial position earliest, latest position latest, time point position timestamp.
    startTime Number
    It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
    topicId String
    Topic TopicId.
    useAutoCreateTopic Boolean
    whether the used topic need to be automatically created (currently only supports SOURCE inflow tasks).
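
    The following TypeScript sketch shows how these fields combine; the topic name and timestamp are hypothetical, and startTime only takes effect because offsetType is set to timestamp.

    const topicParam = {
        resource: "my-standalone-topic", // hypothetical standalone topic name
        compressionType: "lz4",          // none | gzip | snappy | lz4
        offsetType: "timestamp",         // earliest | latest | timestamp
        startTime: 1700000000,           // Unix timestamp in seconds; only read when offsetType is "timestamp"
        useAutoCreateTopic: false,
    };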

    CkafkaDatahubTaskTransformParam, CkafkaDatahubTaskTransformParamArgs

    AnalysisFormat string
    Parsing format: JSON | DELIMITER | REGULAR.
    Content string
    Raw data.
    FailureParam CkafkaDatahubTaskTransformParamFailureParam
    Whether to keep data that failed to parse.
    OutputFormat string
    Output format.
    SourceType string
    Data source: TOPIC (pull from the source topic) or CUSTOMIZE (custom).
    AnalyseResults List<CkafkaDatahubTaskTransformParamAnalyseResult>
    Analysis result.
    FilterParams List<CkafkaDatahubTaskTransformParamFilterParam>
    Filter.
    MapParams List<CkafkaDatahubTaskTransformParamMapParam>
    Map.
    Regex string
    Delimiter or regular expression.
    Result string
    Test results.
    UseEventBus bool
    Whether the underlying engine uses the event bus (EB).
    AnalysisFormat string
    Parsing format: JSON | DELIMITER | REGULAR.
    Content string
    Raw data.
    FailureParam CkafkaDatahubTaskTransformParamFailureParam
    Whether to keep data that failed to parse.
    OutputFormat string
    Output format.
    SourceType string
    Data source: TOPIC (pull from the source topic) or CUSTOMIZE (custom).
    AnalyseResults []CkafkaDatahubTaskTransformParamAnalyseResult
    Analysis result.
    FilterParams []CkafkaDatahubTaskTransformParamFilterParam
    Filter.
    MapParams []CkafkaDatahubTaskTransformParamMapParam
    Map.
    Regex string
    Delimiter or regular expression.
    Result string
    Test results.
    UseEventBus bool
    Whether the underlying engine uses the event bus (EB).
    analysisFormat String
    Parsing format: JSON | DELIMITER | REGULAR.
    content String
    Raw data.
    failureParam CkafkaDatahubTaskTransformParamFailureParam
    Whether to keep data that failed to parse.
    outputFormat String
    Output format.
    sourceType String
    Data source: TOPIC (pull from the source topic) or CUSTOMIZE (custom).
    analyseResults List<CkafkaDatahubTaskTransformParamAnalyseResult>
    Analysis result.
    filterParams List<CkafkaDatahubTaskTransformParamFilterParam>
    Filter.
    mapParams List<CkafkaDatahubTaskTransformParamMapParam>
    Map.
    regex String
    Delimiter or regular expression.
    result String
    Test results.
    useEventBus Boolean
    Whether the underlying engine uses the event bus (EB).
    analysisFormat string
    Parsing format: JSON | DELIMITER | REGULAR.
    content string
    Raw data.
    failureParam CkafkaDatahubTaskTransformParamFailureParam
    Whether to keep data that failed to parse.
    outputFormat string
    Output format.
    sourceType string
    Data source: TOPIC (pull from the source topic) or CUSTOMIZE (custom).
    analyseResults CkafkaDatahubTaskTransformParamAnalyseResult[]
    Analysis result.
    filterParams CkafkaDatahubTaskTransformParamFilterParam[]
    Filter.
    mapParams CkafkaDatahubTaskTransformParamMapParam[]
    Map.
    regex string
    Delimiter or regular expression.
    result string
    Test results.
    useEventBus boolean
    Whether the underlying engine uses the event bus (EB).
    analysis_format str
    Parsing format: JSON | DELIMITER | REGULAR.
    content str
    Raw data.
    failure_param CkafkaDatahubTaskTransformParamFailureParam
    Whether to keep data that failed to parse.
    output_format str
    Output format.
    source_type str
    Data source: TOPIC (pull from the source topic) or CUSTOMIZE (custom).
    analyse_results Sequence[CkafkaDatahubTaskTransformParamAnalyseResult]
    Analysis result.
    filter_params Sequence[CkafkaDatahubTaskTransformParamFilterParam]
    Filter.
    map_params Sequence[CkafkaDatahubTaskTransformParamMapParam]
    Map.
    regex str
    Delimiter or regular expression.
    result str
    Test results.
    use_event_bus bool
    Whether the underlying engine uses the event bus (EB).
    analysisFormat String
    Parsing format: JSON | DELIMITER | REGULAR.
    content String
    Raw data.
    failureParam Property Map
    Whether to keep data that failed to parse.
    outputFormat String
    Output format.
    sourceType String
    Data source: TOPIC (pull from the source topic) or CUSTOMIZE (custom).
    analyseResults List<Property Map>
    Analysis result.
    filterParams List<Property Map>
    Filter.
    mapParams List<Property Map>
    Map.
    regex String
    Delimiter or regular expression.
    result String
    Test results.
    useEventBus Boolean
    Whether the underlying engine uses the event bus (EB).
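
    As a hedged TypeScript sketch (field values are illustrative), a transform step that parses JSON input and drops records that fail to parse could look like this:

    const transformParam = {
        analysisFormat: "JSON", // JSON | DELIMITER | REGULAR
        content: "{\"level\":\"ERROR\",\"msg\":\"timeout\"}", // sample raw record
        outputFormat: "JSON",
        sourceType: "TOPIC",    // TOPIC | CUSTOMIZE
        failureParam: {
            type: "DROP",       // discard records that fail to parse
        },
    };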

    CkafkaDatahubTaskTransformParamAnalyseResult, CkafkaDatahubTaskTransformParamAnalyseResultArgs

    Key string
    Key.
    Type string
    Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (strip leading and trailing spaces), REGEX REPLACE (regex replacement), URL DECODE, or LOWERCASE (convert to lowercase).
    Value string
    Value.
    Key string
    Key.
    Type string
    Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (strip leading and trailing spaces), REGEX REPLACE (regex replacement), URL DECODE, or LOWERCASE (convert to lowercase).
    Value string
    Value.
    key String
    Key.
    type String
    Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (strip leading and trailing spaces), REGEX REPLACE (regex replacement), URL DECODE, or LOWERCASE (convert to lowercase).
    value String
    Value.
    key string
    Key.
    type string
    Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (strip leading and trailing spaces), REGEX REPLACE (regex replacement), URL DECODE, or LOWERCASE (convert to lowercase).
    value string
    Value.
    key str
    Key.
    type str
    Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (strip leading and trailing spaces), REGEX REPLACE (regex replacement), URL DECODE, or LOWERCASE (convert to lowercase).
    value str
    Value.
    key String
    Key.
    type String
    Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (strip leading and trailing spaces), REGEX REPLACE (regex replacement), URL DECODE, or LOWERCASE (convert to lowercase).
    value String
    Value.

    CkafkaDatahubTaskTransformParamFailureParam, CkafkaDatahubTaskTransformParamFailureParamArgs

    Type string
    Type: DLQ (dead letter queue), IGNORE_ERROR, or DROP.
    DlqType string
    DLQ type: CKAFKA | TOPIC.
    KafkaParam CkafkaDatahubTaskTransformParamFailureParamKafkaParam
    CKafka-type DLQ.
    MaxRetryAttempts double
    Retry count.
    RetryInterval double
    Retry interval.
    TopicParam CkafkaDatahubTaskTransformParamFailureParamTopicParam
    DIP topic-type dead letter queue.
    Type string
    Type: DLQ (dead letter queue), IGNORE_ERROR, or DROP.
    DlqType string
    DLQ type: CKAFKA | TOPIC.
    KafkaParam CkafkaDatahubTaskTransformParamFailureParamKafkaParam
    CKafka-type DLQ.
    MaxRetryAttempts float64
    Retry count.
    RetryInterval float64
    Retry interval.
    TopicParam CkafkaDatahubTaskTransformParamFailureParamTopicParam
    DIP topic-type dead letter queue.
    type String
    Type: DLQ (dead letter queue), IGNORE_ERROR, or DROP.
    dlqType String
    DLQ type: CKAFKA | TOPIC.
    kafkaParam CkafkaDatahubTaskTransformParamFailureParamKafkaParam
    CKafka-type DLQ.
    maxRetryAttempts Double
    Retry count.
    retryInterval Double
    Retry interval.
    topicParam CkafkaDatahubTaskTransformParamFailureParamTopicParam
    DIP topic-type dead letter queue.
    type string
    Type: DLQ (dead letter queue), IGNORE_ERROR, or DROP.
    dlqType string
    DLQ type: CKAFKA | TOPIC.
    kafkaParam CkafkaDatahubTaskTransformParamFailureParamKafkaParam
    CKafka-type DLQ.
    maxRetryAttempts number
    Retry count.
    retryInterval number
    Retry interval.
    topicParam CkafkaDatahubTaskTransformParamFailureParamTopicParam
    DIP topic-type dead letter queue.
    type str
    Type: DLQ (dead letter queue), IGNORE_ERROR, or DROP.
    dlq_type str
    DLQ type: CKAFKA | TOPIC.
    kafka_param CkafkaDatahubTaskTransformParamFailureParamKafkaParam
    CKafka-type DLQ.
    max_retry_attempts float
    Retry count.
    retry_interval float
    Retry interval.
    topic_param CkafkaDatahubTaskTransformParamFailureParamTopicParam
    DIP topic-type dead letter queue.
    type String
    Type: DLQ (dead letter queue), IGNORE_ERROR, or DROP.
    dlqType String
    DLQ type: CKAFKA | TOPIC.
    kafkaParam Property Map
    CKafka-type DLQ.
    maxRetryAttempts Number
    Retry count.
    retryInterval Number
    Retry interval.
    topicParam Property Map
    DIP topic-type dead letter queue.
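
    A minimal TypeScript sketch of a DLQ-style failure policy; the instance ID, topic name, and the assumption that retryInterval is in seconds are all hypothetical:

    const failureParam = {
        type: "DLQ",             // DLQ | IGNORE_ERROR | DROP
        dlqType: "CKAFKA",       // CKAFKA | TOPIC
        maxRetryAttempts: 3,
        retryInterval: 60,       // unit assumed to be seconds
        kafkaParam: {
            resource: "ckafka-xxxxxxxx", // hypothetical instance ID
            selfBuilt: false,
            topic: "my-dlq-topic",       // hypothetical DLQ topic
        },
    };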

    CkafkaDatahubTaskTransformParamFailureParamKafkaParam, CkafkaDatahubTaskTransformParamFailureParamKafkaParamArgs

    Resource string
    Instance resource.
    SelfBuilt bool
    Whether the cluster is self-built rather than a cloud product instance.
    CompressionType string
    Whether to compress when writing to the topic; fill in none to disable, or open to enable.
    EnableToleration bool
    Whether to enable the dead letter queue.
    MsgMultiple double
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    OffsetType string
    Offset type: earliest (from the beginning), latest (from the latest), or timestamp (from a specific time).
    PartitionNum double
    The partition count of the topic.
    QpsLimit double
    QPS (queries per second) limit.
    ResourceName string
    Instance name.
    StartTime double
    Required when the offset type is timestamp.
    TableMappings List<CkafkaDatahubTaskTransformParamFailureParamKafkaParamTableMapping>
    Table-to-topic mappings; required when multiple topics are selected.
    Topic string
    Topic name.
    TopicId string
    Topic ID.
    UseAutoCreateTopic bool
    Whether the topic in use needs to be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, fill the name of the topic to be created into the Topic field).
    UseTableMapping bool
    Whether to use multiple tables.
    ZoneId double
    Zone ID.
    Resource string
    Instance resource.
    SelfBuilt bool
    Whether the cluster is self-built rather than a cloud product instance.
    CompressionType string
    Whether to compress when writing to the topic; fill in none to disable, or open to enable.
    EnableToleration bool
    Whether to enable the dead letter queue.
    MsgMultiple float64
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    OffsetType string
    Offset type: earliest (from the beginning), latest (from the latest), or timestamp (from a specific time).
    PartitionNum float64
    The partition count of the topic.
    QpsLimit float64
    QPS (queries per second) limit.
    ResourceName string
    Instance name.
    StartTime float64
    Required when the offset type is timestamp.
    TableMappings []CkafkaDatahubTaskTransformParamFailureParamKafkaParamTableMapping
    Table-to-topic mappings; required when multiple topics are selected.
    Topic string
    Topic name.
    TopicId string
    Topic ID.
    UseAutoCreateTopic bool
    Whether the topic in use needs to be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, fill the name of the topic to be created into the Topic field).
    UseTableMapping bool
    Whether to use multiple tables.
    ZoneId float64
    Zone ID.
    resource String
    Instance resource.
    selfBuilt Boolean
    Whether the cluster is self-built rather than a cloud product instance.
    compressionType String
    Whether to compress when writing to the topic; fill in none to disable, or open to enable.
    enableToleration Boolean
    Whether to enable the dead letter queue.
    msgMultiple Double
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType String
    Offset type: earliest (from the beginning), latest (from the latest), or timestamp (from a specific time).
    partitionNum Double
    The partition count of the topic.
    qpsLimit Double
    QPS (queries per second) limit.
    resourceName String
    Instance name.
    startTime Double
    Required when the offset type is timestamp.
    tableMappings List<CkafkaDatahubTaskTransformParamFailureParamKafkaParamTableMapping>
    Table-to-topic mappings; required when multiple topics are selected.
    topic String
    Topic name.
    topicId String
    Topic ID.
    useAutoCreateTopic Boolean
    Whether the topic in use needs to be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, fill the name of the topic to be created into the Topic field).
    useTableMapping Boolean
    Whether to use multiple tables.
    zoneId Double
    Zone ID.
    resource string
    Instance resource.
    selfBuilt boolean
    Whether the cluster is self-built rather than a cloud product instance.
    compressionType string
    Whether to compress when writing to the topic; fill in none to disable, or open to enable.
    enableToleration boolean
    Whether to enable the dead letter queue.
    msgMultiple number
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType string
    Offset type: earliest (from the beginning), latest (from the latest), or timestamp (from a specific time).
    partitionNum number
    The partition count of the topic.
    qpsLimit number
    QPS (queries per second) limit.
    resourceName string
    Instance name.
    startTime number
    Required when the offset type is timestamp.
    tableMappings CkafkaDatahubTaskTransformParamFailureParamKafkaParamTableMapping[]
    Table-to-topic mappings; required when multiple topics are selected.
    topic string
    Topic name.
    topicId string
    Topic ID.
    useAutoCreateTopic boolean
    Whether the topic in use needs to be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, fill the name of the topic to be created into the Topic field).
    useTableMapping boolean
    Whether to use multiple tables.
    zoneId number
    Zone ID.
    resource str
    Instance resource.
    self_built bool
    Whether the cluster is self-built rather than a cloud product instance.
    compression_type str
    Whether to compress when writing to the topic; fill in none to disable, or open to enable.
    enable_toleration bool
    Whether to enable the dead letter queue.
    msg_multiple float
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offset_type str
    Offset type: earliest (from the beginning), latest (from the latest), or timestamp (from a specific time).
    partition_num float
    The partition count of the topic.
    qps_limit float
    QPS (queries per second) limit.
    resource_name str
    Instance name.
    start_time float
    Required when the offset type is timestamp.
    table_mappings Sequence[CkafkaDatahubTaskTransformParamFailureParamKafkaParamTableMapping]
    Table-to-topic mappings; required when multiple topics are selected.
    topic str
    Topic name.
    topic_id str
    Topic ID.
    use_auto_create_topic bool
    Whether the topic in use needs to be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, fill the name of the topic to be created into the Topic field).
    use_table_mapping bool
    Whether to use multiple tables.
    zone_id float
    Zone ID.
    resource String
    Instance resource.
    selfBuilt Boolean
    Whether the cluster is self-built rather than a cloud product instance.
    compressionType String
    Whether to compress when writing to the topic; fill in none to disable, or open to enable.
    enableToleration Boolean
    Whether to enable the dead letter queue.
    msgMultiple Number
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType String
    Offset type: earliest (from the beginning), latest (from the latest), or timestamp (from a specific time).
    partitionNum Number
    The partition count of the topic.
    qpsLimit Number
    QPS (queries per second) limit.
    resourceName String
    Instance name.
    startTime Number
    Required when the offset type is timestamp.
    tableMappings List<Property Map>
    Table-to-topic mappings; required when multiple topics are selected.
    topic String
    Topic name.
    topicId String
    Topic ID.
    useAutoCreateTopic Boolean
    Whether the topic in use needs to be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, fill the name of the topic to be created into the Topic field).
    useTableMapping Boolean
    Whether to use multiple tables.
    zoneId Number
    Zone ID.
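
    A TypeScript sketch of a kafkaParam that fans a multi-table source out to per-table topics; every identifier below is hypothetical, and tableMappings is only consulted because useTableMapping is true:

    const kafkaParam = {
        resource: "ckafka-xxxxxxxx", // hypothetical instance ID
        selfBuilt: false,
        offsetType: "earliest",
        useTableMapping: true,
        tableMappings: [{
            database: "app_db",
            table: "orders,order_items", // comma-separated table list
            topic: "orders-topic",
            topicId: "topic-xxxxxxxx",
        }],
    };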

    CkafkaDatahubTaskTransformParamFailureParamKafkaParamTableMapping, CkafkaDatahubTaskTransformParamFailureParamKafkaParamTableMappingArgs

    Database string
    Database name.
    Table string
    Table name; use , (comma) to separate multiple tables.
    Topic string
    Topic name.
    TopicId string
    Topic ID.
    Database string
    Database name.
    Table string
    Table name; use , (comma) to separate multiple tables.
    Topic string
    Topic name.
    TopicId string
    Topic ID.
    database String
    Database name.
    table String
    Table name; use , (comma) to separate multiple tables.
    topic String
    Topic name.
    topicId String
    Topic ID.
    database string
    Database name.
    table string
    Table name; use , (comma) to separate multiple tables.
    topic string
    Topic name.
    topicId string
    Topic ID.
    database str
    Database name.
    table str
    Table name; use , (comma) to separate multiple tables.
    topic str
    Topic name.
    topic_id str
    Topic ID.
    database String
    Database name.
    table String
    Table name; use , (comma) to separate multiple tables.
    topic String
    Topic name.
    topicId String
    Topic ID.

    CkafkaDatahubTaskTransformParamFailureParamTopicParam, CkafkaDatahubTaskTransformParamFailureParamTopicParamArgs

    Resource string
    The name of the standalone (separately sold) topic.
    CompressionType string
    Compression to apply when writing to the topic; fill in none to disable, or choose one of gzip, snappy, lz4 to enable.
    MsgMultiple double
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    OffsetType string
    Offset type: earliest (initial position), latest (latest position), or timestamp (point in time).
    StartTime double
    Required when the offset type is timestamp; pass a Unix timestamp accurate to the second.
    TopicId string
    The topic's TopicId.
    UseAutoCreateTopic bool
    Whether the topic in use needs to be created automatically (currently only supported for SOURCE inflow tasks).
    Resource string
    The name of the standalone (separately sold) topic.
    CompressionType string
    Compression to apply when writing to the topic; fill in none to disable, or choose one of gzip, snappy, lz4 to enable.
    MsgMultiple float64
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    OffsetType string
    Offset type: earliest (initial position), latest (latest position), or timestamp (point in time).
    StartTime float64
    Required when the offset type is timestamp; pass a Unix timestamp accurate to the second.
    TopicId string
    The topic's TopicId.
    UseAutoCreateTopic bool
    Whether the topic in use needs to be created automatically (currently only supported for SOURCE inflow tasks).
    resource String
    The name of the standalone (separately sold) topic.
    compressionType String
    Compression to apply when writing to the topic; fill in none to disable, or choose one of gzip, snappy, lz4 to enable.
    msgMultiple Double
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType String
    Offset type: earliest (initial position), latest (latest position), or timestamp (point in time).
    startTime Double
    Required when the offset type is timestamp; pass a Unix timestamp accurate to the second.
    topicId String
    The topic's TopicId.
    useAutoCreateTopic Boolean
    Whether the topic in use needs to be created automatically (currently only supported for SOURCE inflow tasks).
    resource string
    The name of the standalone (separately sold) topic.
    compressionType string
    Compression to apply when writing to the topic; fill in none to disable, or choose one of gzip, snappy, lz4 to enable.
    msgMultiple number
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType string
    Offset type: earliest (initial position), latest (latest position), or timestamp (point in time).
    startTime number
    Required when the offset type is timestamp; pass a Unix timestamp accurate to the second.
    topicId string
    The topic's TopicId.
    useAutoCreateTopic boolean
    Whether the topic in use needs to be created automatically (currently only supported for SOURCE inflow tasks).
    resource str
    The name of the standalone (separately sold) topic.
    compression_type str
    Compression to apply when writing to the topic; fill in none to disable, or choose one of gzip, snappy, lz4 to enable.
    msg_multiple float
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offset_type str
    Offset type: earliest (initial position), latest (latest position), or timestamp (point in time).
    start_time float
    Required when the offset type is timestamp; pass a Unix timestamp accurate to the second.
    topic_id str
    The topic's TopicId.
    use_auto_create_topic bool
    Whether the topic in use needs to be created automatically (currently only supported for SOURCE inflow tasks).
    resource String
    The name of the standalone (separately sold) topic.
    compressionType String
    Compression to apply when writing to the topic; fill in none to disable, or choose one of gzip, snappy, lz4 to enable.
    msgMultiple Number
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType String
    Offset type: earliest (initial position), latest (latest position), or timestamp (point in time).
    startTime Number
    Required when the offset type is timestamp; pass a Unix timestamp accurate to the second.
    topicId String
    The topic's TopicId.
    useAutoCreateTopic Boolean
    Whether the topic in use needs to be created automatically (currently only supported for SOURCE inflow tasks).

    CkafkaDatahubTaskTransformParamFilterParam, CkafkaDatahubTaskTransformParamFilterParamArgs

    Key string
    Key.
    MatchMode string
    Matching mode: PREFIX (prefix match), SUFFIX (suffix match), CONTAINS (contains match), EXCEPT (exclusion match), NUMBER (numeric match), or IP (IP match).
    Value string
    Value.
    Type string
    REGULAR.
    Key string
    Key.
    MatchMode string
    Matching mode: PREFIX (prefix match), SUFFIX (suffix match), CONTAINS (contains match), EXCEPT (exclusion match), NUMBER (numeric match), or IP (IP match).
    Value string
    Value.
    Type string
    REGULAR.
    key String
    Key.
    matchMode String
    Matching mode: PREFIX (prefix match), SUFFIX (suffix match), CONTAINS (contains match), EXCEPT (exclusion match), NUMBER (numeric match), or IP (IP match).
    value String
    Value.
    type String
    REGULAR.
    key string
    Key.
    matchMode string
    Matching mode: PREFIX (prefix match), SUFFIX (suffix match), CONTAINS (contains match), EXCEPT (exclusion match), NUMBER (numeric match), or IP (IP match).
    value string
    Value.
    type string
    REGULAR.
    key str
    Key.
    match_mode str
    Matching mode: PREFIX (prefix match), SUFFIX (suffix match), CONTAINS (contains match), EXCEPT (exclusion match), NUMBER (numeric match), or IP (IP match).
    value str
    Value.
    type str
    REGULAR.
    key String
    Key.
    matchMode String
    Matching mode: PREFIX (prefix match), SUFFIX (suffix match), CONTAINS (contains match), EXCEPT (exclusion match), NUMBER (numeric match), or IP (IP match).
    value String
    Value.
    type String
    REGULAR.
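
    For example (a hedged sketch; key and value are illustrative), a filter that keeps only records whose level field starts with ERROR:

    const filterParam = {
        key: "level",
        matchMode: "PREFIX", // PREFIX | SUFFIX | CONTAINS | EXCEPT | NUMBER | IP
        value: "ERROR",
    };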

    CkafkaDatahubTaskTransformParamMapParam, CkafkaDatahubTaskTransformParamMapParamArgs

    Key string
    Key.
    Type string
    Type: DEFAULT (default), DATE (system default timestamp), CUSTOMIZE (custom), or MAPPING (mapping).
    Value string
    Value.
    Key string
    Key.
    Type string
    Type: DEFAULT (default), DATE (system default timestamp), CUSTOMIZE (custom), or MAPPING (mapping).
    Value string
    Value.
    key String
    Key.
    type String
    Type: DEFAULT (default), DATE (system default timestamp), CUSTOMIZE (custom), or MAPPING (mapping).
    value String
    Value.
    key string
    Key.
    type string
    Type: DEFAULT (default), DATE (system default timestamp), CUSTOMIZE (custom), or MAPPING (mapping).
    value string
    Value.
    key str
    Key.
    type str
    Type: DEFAULT (default), DATE (system default timestamp), CUSTOMIZE (custom), or MAPPING (mapping).
    value str
    Value.
    key String
    Key.
    type String
    Type: DEFAULT (default), DATE (system default timestamp), CUSTOMIZE (custom), or MAPPING (mapping).
    value String
    Value.
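
    For example (a hedged sketch), a mapping that overwrites the region field with a fixed custom value:

    const mapParam = {
        key: "region",
        type: "CUSTOMIZE",     // DEFAULT | DATE | CUSTOMIZE | MAPPING
        value: "ap-guangzhou", // illustrative value
    };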

    CkafkaDatahubTaskTransformsParam, CkafkaDatahubTaskTransformsParamArgs

    Content string
    Raw data.
    FieldChains List<CkafkaDatahubTaskTransformsParamFieldChain>
    Processing chain.
    BatchAnalyse CkafkaDatahubTaskTransformsParamBatchAnalyse
    Data processing.
    FailureParam CkafkaDatahubTaskTransformsParamFailureParam
    Failure handling.
    FilterParams List<CkafkaDatahubTaskTransformsParamFilterParam>
    Filter.
    KeepMetadata bool
    Whether to keep the source topic metadata (topic, partition, offset); defaults to false.
    OutputFormat string
    Output format: JSON or ROW; defaults to JSON.
    Result string
    Result.
    RowParam CkafkaDatahubTaskTransformsParamRowParam
    Required when the output format is ROW.
    SourceType string
    Data source.
    Content string
    Raw data.
    FieldChains []CkafkaDatahubTaskTransformsParamFieldChain
    Processing chain.
    BatchAnalyse CkafkaDatahubTaskTransformsParamBatchAnalyse
    Data processing.
    FailureParam CkafkaDatahubTaskTransformsParamFailureParam
    Failure handling.
    FilterParams []CkafkaDatahubTaskTransformsParamFilterParam
    Filter.
    KeepMetadata bool
    Whether to keep the source topic metadata (topic, partition, offset); defaults to false.
    OutputFormat string
    Output format: JSON or ROW; defaults to JSON.
    Result string
    Result.
    RowParam CkafkaDatahubTaskTransformsParamRowParam
    Required when the output format is ROW.
    SourceType string
    Data source.
    content String
    Raw data.
    fieldChains List<CkafkaDatahubTaskTransformsParamFieldChain>
    Processing chain.
    batchAnalyse CkafkaDatahubTaskTransformsParamBatchAnalyse
    Data processing.
    failureParam CkafkaDatahubTaskTransformsParamFailureParam
    Failure handling.
    filterParams List<CkafkaDatahubTaskTransformsParamFilterParam>
    Filter.
    keepMetadata Boolean
    Whether to keep the source topic metadata (topic, partition, offset); defaults to false.
    outputFormat String
    Output format: JSON or ROW; defaults to JSON.
    result String
    Result.
    rowParam CkafkaDatahubTaskTransformsParamRowParam
    Required when the output format is ROW.
    sourceType String
    Data source.
    content string
    Raw data.
    fieldChains CkafkaDatahubTaskTransformsParamFieldChain[]
    Processing chain.
    batchAnalyse CkafkaDatahubTaskTransformsParamBatchAnalyse
    Data processing.
    failureParam CkafkaDatahubTaskTransformsParamFailureParam
    Failure handling.
    filterParams CkafkaDatahubTaskTransformsParamFilterParam[]
    Filter.
    keepMetadata boolean
    Whether to keep the source topic metadata (topic, partition, offset); defaults to false.
    outputFormat string
    Output format: JSON or ROW; defaults to JSON.
    result string
    Result.
    rowParam CkafkaDatahubTaskTransformsParamRowParam
    Required when the output format is ROW.
    sourceType string
    Data source.
    content str
    Raw data.
    field_chains Sequence[CkafkaDatahubTaskTransformsParamFieldChain]
    Processing chain.
    batch_analyse CkafkaDatahubTaskTransformsParamBatchAnalyse
    Data processing.
    failure_param CkafkaDatahubTaskTransformsParamFailureParam
    Failure handling.
    filter_params Sequence[CkafkaDatahubTaskTransformsParamFilterParam]
    Filter.
    keep_metadata bool
    Whether to keep the source topic metadata (topic, partition, offset); defaults to false.
    output_format str
    Output format: JSON or ROW; defaults to JSON.
    result str
    Result.
    row_param CkafkaDatahubTaskTransformsParamRowParam
    Required when the output format is ROW.
    source_type str
    Data source.
    content String
    Raw data.
    fieldChains List<Property Map>
    Processing chain.
    batchAnalyse Property Map
    Data processing.
    failureParam Property Map
    Failure handling.
    filterParams List<Property Map>
    Filter.
    keepMetadata Boolean
    Whether to keep the source topic metadata (topic, partition, offset); defaults to false.
    outputFormat String
    Output format: JSON or ROW; defaults to JSON.
    result String
    Result.
    rowParam Property Map
    Required when the output format is ROW.
    sourceType String
    Data source.
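
    A TypeScript sketch tying several of these fields together (values illustrative; fieldChains is left empty for brevity even though a real task would populate it):

    const transformsParam = {
        content: "{\"region\":\"ap-guangzhou\",\"level\":\"ERROR\"}",
        fieldChains: [],
        keepMetadata: true,   // carry source topic/partition/offset through
        outputFormat: "JSON", // JSON | ROW
        sourceType: "TOPIC",
        filterParams: [{ key: "level", matchMode: "CONTAINS", value: "ERROR" }],
        failureParam: { type: "DROP" },
    };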

    CkafkaDatahubTaskTransformsParamBatchAnalyse, CkafkaDatahubTaskTransformsParamBatchAnalyseArgs

    Format string
    ONE BY ONE (single output) or MERGE (combined output).
    Format string
    ONE BY ONE (single output) or MERGE (combined output).
    format String
    ONE BY ONE (single output) or MERGE (combined output).
    format string
    ONE BY ONE (single output) or MERGE (combined output).
    format str
    ONE BY ONE (single output) or MERGE (combined output).
    format String
    ONE BY ONE (single output) or MERGE (combined output).
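
    As a one-line sketch, choosing per-record output rather than merged output:

    const batchAnalyse = { format: "ONE BY ONE" }; // or "MERGE" for combined output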

    CkafkaDatahubTaskTransformsParamFailureParam, CkafkaDatahubTaskTransformsParamFailureParamArgs

    Type string
    Type: DLQ (dead letter queue), IGNORE_ERROR, or DROP.
    DlqType string
    DLQ type: CKAFKA | TOPIC.
    KafkaParam CkafkaDatahubTaskTransformsParamFailureParamKafkaParam
    CKafka-type DLQ.
    MaxRetryAttempts double
    Retry count.
    RetryInterval double
    Retry interval.
    TopicParam CkafkaDatahubTaskTransformsParamFailureParamTopicParam
    DIP topic-type dead letter queue.
    Type string
    Type: DLQ (dead letter queue), IGNORE_ERROR, or DROP.
    DlqType string
    DLQ type: CKAFKA | TOPIC.
    KafkaParam CkafkaDatahubTaskTransformsParamFailureParamKafkaParam
    CKafka-type DLQ.
    MaxRetryAttempts float64
    Retry count.
    RetryInterval float64
    Retry interval.
    TopicParam CkafkaDatahubTaskTransformsParamFailureParamTopicParam
    DIP topic-type dead letter queue.
    type String
    Type: DLQ (dead letter queue), IGNORE_ERROR, or DROP.
    dlqType String
    DLQ type: CKAFKA | TOPIC.
    kafkaParam CkafkaDatahubTaskTransformsParamFailureParamKafkaParam
    CKafka-type DLQ.
    maxRetryAttempts Double
    Retry count.
    retryInterval Double
    Retry interval.
    topicParam CkafkaDatahubTaskTransformsParamFailureParamTopicParam
    DIP topic-type dead letter queue.
    type string
    Type: DLQ (dead letter queue), IGNORE_ERROR, or DROP.
    dlqType string
    DLQ type: CKAFKA | TOPIC.
    kafkaParam CkafkaDatahubTaskTransformsParamFailureParamKafkaParam
    CKafka-type DLQ.
    maxRetryAttempts number
    Retry count.
    retryInterval number
    Retry interval.
    topicParam CkafkaDatahubTaskTransformsParamFailureParamTopicParam
    DIP topic-type dead letter queue.
    type str
    Type: DLQ (dead letter queue), IGNORE_ERROR, or DROP.
    dlq_type str
    DLQ type: CKAFKA | TOPIC.
    kafka_param CkafkaDatahubTaskTransformsParamFailureParamKafkaParam
    CKafka-type DLQ.
    max_retry_attempts float
    Retry count.
    retry_interval float
    Retry interval.
    topic_param CkafkaDatahubTaskTransformsParamFailureParamTopicParam
    DIP topic-type dead letter queue.
    type String
    Type: DLQ (dead letter queue), IGNORE_ERROR, or DROP.
    dlqType String
    DLQ type: CKAFKA | TOPIC.
    kafkaParam Property Map
    CKafka-type DLQ.
    maxRetryAttempts Number
    Retry count.
    retryInterval Number
    Retry interval.
    topicParam Property Map
    DIP topic-type dead letter queue.

    CkafkaDatahubTaskTransformsParamFailureParamKafkaParam, CkafkaDatahubTaskTransformsParamFailureParamKafkaParamArgs

    Resource string
    Instance resource.
    SelfBuilt bool
    Whether the cluster is self-built rather than a cloud product instance.
    CompressionType string
    Whether to compress when writing to the topic; fill in none to disable, or open to enable.
    EnableToleration bool
    Whether to enable the dead letter queue.
    MsgMultiple double
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    OffsetType string
    Offset type: earliest (from the beginning), latest (from the latest), or timestamp (from a specific time).
    PartitionNum double
    The partition count of the topic.
    QpsLimit double
    QPS (queries per second) limit.
    ResourceName string
    Instance name.
    StartTime double
    Required when the offset type is timestamp.
    TableMappings List<CkafkaDatahubTaskTransformsParamFailureParamKafkaParamTableMapping>
    Table-to-topic mappings; required when multiple topics are selected.
    Topic string
    Topic name.
    TopicId string
    Topic ID.
    UseAutoCreateTopic bool
    Whether the topic in use needs to be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, fill the name of the topic to be created into the Topic field).
    UseTableMapping bool
    Whether to use multiple tables.
    ZoneId double
    Zone ID.
    Resource string
    Instance resource.
    SelfBuilt bool
    Whether the cluster is self-built rather than a cloud product instance.
    CompressionType string
    Whether to compress when writing to the topic; fill in none to disable, or open to enable.
    EnableToleration bool
    Whether to enable the dead letter queue.
    MsgMultiple float64
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    OffsetType string
    Offset type: earliest (from the beginning), latest (from the latest), or timestamp (from a specific time).
    PartitionNum float64
    The partition count of the topic.
    QpsLimit float64
    QPS (queries per second) limit.
    ResourceName string
    Instance name.
    StartTime float64
    Required when the offset type is timestamp.
    TableMappings []CkafkaDatahubTaskTransformsParamFailureParamKafkaParamTableMapping
    Table-to-topic mappings; required when multiple topics are selected.
    Topic string
    Topic name.
    TopicId string
    Topic ID.
    UseAutoCreateTopic bool
    Whether the topic in use needs to be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, fill the name of the topic to be created into the Topic field).
    UseTableMapping bool
    Whether to use multiple tables.
    ZoneId float64
    Zone ID.
    resource String
    Instance resource.
    selfBuilt Boolean
    Whether the cluster is self-built rather than a cloud product instance.
    compressionType String
    Whether to compress when writing to the topic; fill in none to disable, or open to enable.
    enableToleration Boolean
    Whether to enable the dead letter queue.
    msgMultiple Double
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType String
    Offset type: earliest (from the beginning), latest (from the latest), or timestamp (from a specific time).
    partitionNum Double
    The partition count of the topic.
    qpsLimit Double
    QPS (queries per second) limit.
    resourceName String
    Instance name.
    startTime Double
    Required when the offset type is timestamp.
    tableMappings List<CkafkaDatahubTaskTransformsParamFailureParamKafkaParamTableMapping>
    Table-to-topic mappings; required when multiple topics are selected.
    topic String
    Topic name.
    topicId String
    Topic ID.
    useAutoCreateTopic Boolean
    Whether the topic in use needs to be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, fill the name of the topic to be created into the Topic field).
    useTableMapping Boolean
    Whether to use multiple tables.
    zoneId Double
    Zone ID.
    resource string
    Instance resource.
    selfBuilt boolean
    Whether the cluster is self-built rather than a cloud product instance.
    compressionType string
    Whether to compress when writing to the topic; fill in none to disable, or open to enable.
    enableToleration boolean
    Whether to enable the dead letter queue.
    msgMultiple number
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType string
    Offset type: earliest (from the beginning), latest (from the latest), or timestamp (from a specific time).
    partitionNum number
    The partition count of the topic.
    qpsLimit number
    QPS (queries per second) limit.
    resourceName string
    Instance name.
    startTime number
    Required when the offset type is timestamp.
    tableMappings CkafkaDatahubTaskTransformsParamFailureParamKafkaParamTableMapping[]
    Table-to-topic mappings; required when multiple topics are selected.
    topic string
    Topic name.
    topicId string
    Topic ID.
    useAutoCreateTopic boolean
    Whether the topic in use needs to be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, fill the name of the topic to be created into the Topic field).
    useTableMapping boolean
    Whether to use multiple tables.
    zoneId number
    Zone ID.
    resource str
    Instance resource.
    self_built bool
    Whether the cluster is self-built rather than a cloud product instance.
    compression_type str
    Whether to compress when writing to the topic; fill in none to disable, or open to enable.
    enable_toleration bool
    Whether to enable the dead letter queue.
    msg_multiple float
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offset_type str
    Offset type: earliest (from the beginning), latest (from the latest), or timestamp (from a specific time).
    partition_num float
    The partition count of the topic.
    qps_limit float
    QPS (queries per second) limit.
    resource_name str
    Instance name.
    start_time float
    Required when the offset type is timestamp.
    table_mappings Sequence[CkafkaDatahubTaskTransformsParamFailureParamKafkaParamTableMapping]
    Table-to-topic mappings; required when multiple topics are selected.
    topic str
    Topic name.
    topic_id str
    Topic ID.
    use_auto_create_topic bool
    Whether the topic in use needs to be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, fill the name of the topic to be created into the Topic field).
    use_table_mapping bool
    Whether to use multiple tables.
    zone_id float
    Zone ID.
    resource String
    Instance resource.
    selfBuilt Boolean
    Whether the cluster is self-built rather than a cloud product instance.
    compressionType String
    Whether to compress when writing to the topic; fill in none to disable, or open to enable.
    enableToleration Boolean
    Whether to enable the dead letter queue.
    msgMultiple Number
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType String
    Offset type: earliest (from the beginning), latest (from the latest), or timestamp (from a specific time).
    partitionNum Number
    The partition count of the topic.
    qpsLimit Number
    QPS (queries per second) limit.
    resourceName String
    Instance name.
    startTime Number
    Required when the offset type is timestamp.
    tableMappings List<Property Map>
    Table-to-topic mappings; required when multiple topics are selected.
    topic String
    Topic name.
    topicId String
    Topic ID.
    useAutoCreateTopic Boolean
    Whether the topic in use needs to be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, fill the name of the topic to be created into the Topic field).
    useTableMapping Boolean
    Whether to use multiple tables.
    zoneId Number
    Zone ID.

    CkafkaDatahubTaskTransformsParamFailureParamKafkaParamTableMapping, CkafkaDatahubTaskTransformsParamFailureParamKafkaParamTableMappingArgs

    Database string
    Database name.
    Table string
    Table name; use , (comma) to separate multiple tables.
    Topic string
    Topic name.
    TopicId string
    Topic ID.
    Database string
    Database name.
    Table string
    Table name; use , (comma) to separate multiple tables.
    Topic string
    Topic name.
    TopicId string
    Topic ID.
    database String
    Database name.
    table String
    Table name; use , (comma) to separate multiple tables.
    topic String
    Topic name.
    topicId String
    Topic ID.
    database string
    Database name.
    table string
    Table name; use , (comma) to separate multiple tables.
    topic string
    Topic name.
    topicId string
    Topic ID.
    database str
    Database name.
    table str
    Table name; use , (comma) to separate multiple tables.
    topic str
    Topic name.
    topic_id str
    Topic ID.
    database String
    Database name.
    table String
    Table name; use , (comma) to separate multiple tables.
    topic String
    Topic name.
    topicId String
    Topic ID.

    CkafkaDatahubTaskTransformsParamFailureParamTopicParam, CkafkaDatahubTaskTransformsParamFailureParamTopicParamArgs

    Resource string
    The name of the standalone (separately sold) topic.
    CompressionType string
    Compression to apply when writing to the topic; fill in none to disable, or choose one of gzip, snappy, lz4 to enable.
    MsgMultiple double
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    OffsetType string
    Offset type: earliest (initial position), latest (latest position), or timestamp (point in time).
    StartTime double
    Required when the offset type is timestamp; pass a Unix timestamp accurate to the second.
    TopicId string
    The topic's TopicId.
    UseAutoCreateTopic bool
    Whether the topic in use needs to be created automatically (currently only supported for SOURCE inflow tasks).
    Resource string
    The name of the standalone (separately sold) topic.
    CompressionType string
    Compression to apply when writing to the topic; fill in none to disable, or choose one of gzip, snappy, lz4 to enable.
    MsgMultiple float64
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    OffsetType string
    Offset type: earliest (initial position), latest (latest position), or timestamp (point in time).
    StartTime float64
    Required when the offset type is timestamp; pass a Unix timestamp accurate to the second.
    TopicId string
    The topic's TopicId.
    UseAutoCreateTopic bool
    Whether the topic in use needs to be created automatically (currently only supported for SOURCE inflow tasks).
    resource String
    The name of the standalone (separately sold) topic.
    compressionType String
    Compression to apply when writing to the topic; fill in none to disable, or choose one of gzip, snappy, lz4 to enable.
    msgMultiple Double
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType String
    Offset type: earliest (initial position), latest (latest position), or timestamp (point in time).
    startTime Double
    Required when the offset type is timestamp; pass a Unix timestamp accurate to the second.
    topicId String
    The topic's TopicId.
    useAutoCreateTopic Boolean
    Whether the topic in use needs to be created automatically (currently only supported for SOURCE inflow tasks).
    resource string
    The name of the standalone (separately sold) topic.
    compressionType string
    Compression to apply when writing to the topic; fill in none to disable, or choose one of gzip, snappy, lz4 to enable.
    msgMultiple number
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType string
    Offset type: earliest (initial position), latest (latest position), or timestamp (point in time).
    startTime number
    Required when the offset type is timestamp; pass a Unix timestamp accurate to the second.
    topicId string
    The topic's TopicId.
    useAutoCreateTopic boolean
    Whether the topic in use needs to be created automatically (currently only supported for SOURCE inflow tasks).
    resource str
    The name of the standalone (separately sold) topic.
    compression_type str
    Compression to apply when writing to the topic; fill in none to disable, or choose one of gzip, snappy, lz4 to enable.
    msg_multiple float
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offset_type str
    Offset type: earliest (initial position), latest (latest position), or timestamp (point in time).
    start_time float
    Required when the offset type is timestamp; pass a Unix timestamp accurate to the second.
    topic_id str
    The topic's TopicId.
    use_auto_create_topic bool
    Whether the topic in use needs to be created automatically (currently only supported for SOURCE inflow tasks).
    resource String
    The name of the standalone (separately sold) topic.
    compressionType String
    Compression to apply when writing to the topic; fill in none to disable, or choose one of gzip, snappy, lz4 to enable.
    msgMultiple Number
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType String
    Offset type: earliest (initial position), latest (latest position), or timestamp (point in time).
    startTime Number
    Required when the offset type is timestamp; pass a Unix timestamp accurate to the second.
    topicId String
    The topic's TopicId.
    useAutoCreateTopic Boolean
    Whether the topic in use needs to be created automatically (currently only supported for SOURCE inflow tasks).

    CkafkaDatahubTaskTransformsParamFieldChain, CkafkaDatahubTaskTransformsParamFieldChainArgs

    analyse Property Map
    Analysis.
    analyseJsonResult String
    Parsing results in JSON format.
    analyseResults List<Property Map>
    Analysis results.
    result String
    Test results.
    sMTs List<Property Map>
    Data processing.
    secondaryAnalyse Property Map
    Secondary analysis.
    secondaryAnalyseJsonResult String
    Secondary parsing results in JSON format.
    secondaryAnalyseResults List<Property Map>
    Secondary analysis results.
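
    A hedged TypeScript sketch of a single field chain step: parse the record as JSON, then surface one field from the result (the key name is illustrative):

    const fieldChain = {
        analyse: { format: "JSON" },
        analyseResults: [{
            key: "level",
            operate: "CUSTOMIZE",
            schemeType: "STRING",
        }],
    };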

    CkafkaDatahubTaskTransformsParamFieldChainAnalyse, CkafkaDatahubTaskTransformsParamFieldChainAnalyseArgs

    Format string
    Parsing format: JSON, DELIMITER (delimiter), REGULAR (regex extraction), or SOURCE (process all results from the upper layer).
    InputValue string
    Expression of the KEY to be reprocessed.
    InputValueType string
    Mode of the KEY to be reprocessed.
    Regex string
    Delimiter or regular expression.
    Format string
    Parsing format: JSON, DELIMITER (delimiter), REGULAR (regex extraction), or SOURCE (process all results from the upper layer).
    InputValue string
    Expression of the KEY to be reprocessed.
    InputValueType string
    Mode of the KEY to be reprocessed.
    Regex string
    Delimiter or regular expression.
    format String
    Parsing format: JSON, DELIMITER (delimiter), REGULAR (regex extraction), or SOURCE (process all results from the upper layer).
    inputValue String
    Expression of the KEY to be reprocessed.
    inputValueType String
    Mode of the KEY to be reprocessed.
    regex String
    Delimiter or regular expression.
    format string
    Parsing format: JSON, DELIMITER (delimiter), REGULAR (regex extraction), or SOURCE (process all results from the upper layer).
    inputValue string
    Expression of the KEY to be reprocessed.
    inputValueType string
    Mode of the KEY to be reprocessed.
    regex string
    Delimiter or regular expression.
    format str
    Parsing format: JSON, DELIMITER (delimiter), REGULAR (regex extraction), or SOURCE (process all results from the upper layer).
    input_value str
    Expression of the KEY to be reprocessed.
    input_value_type str
    Mode of the KEY to be reprocessed.
    regex str
    Delimiter or regular expression.
    format String
    Parsing format: JSON, DELIMITER (delimiter), REGULAR (regex extraction), or SOURCE (process all results from the upper layer).
    inputValue String
    Expression of the KEY to be reprocessed.
    inputValueType String
    Mode of the KEY to be reprocessed.
    regex String
    Delimiter or regular expression.

    CkafkaDatahubTaskTransformsParamFieldChainAnalyseResult, CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultArgs

    Key string
    Key.
    Operate string
    Operation: DATE (system preset timestamp), CUSTOMIZE (customization), MAPPING (mapping), or JSONPATH.
    SchemeType string
    Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, or ARRAY.
    OriginalValue string
    Original value.
    Value string
    Value.
    ValueOperate CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperate
    VALUE processing.
    ValueOperates List<CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperate>
    VALUE processing chain.
    Key string
    Key.
    Operate string
    Operation: DATE (system preset timestamp), CUSTOMIZE (customization), MAPPING (mapping), or JSONPATH.
    SchemeType string
    Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, or ARRAY.
    OriginalValue string
    Original value.
    Value string
    Value.
    ValueOperate CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperate
    VALUE processing.
    ValueOperates []CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperate
    VALUE processing chain.
    key String
    Key.
    operate String
    Operation: DATE (system preset timestamp), CUSTOMIZE (customization), MAPPING (mapping), or JSONPATH.
    schemeType String
    Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, or ARRAY.
    originalValue String
    Original value.
    value String
    Value.
    valueOperate CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperate
    VALUE processing.
    valueOperates List<CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperate>
    VALUE processing chain.
    key string
    Key.
    operate string
    Operation: DATE (system preset timestamp), CUSTOMIZE (customization), MAPPING (mapping), or JSONPATH.
    schemeType string
    Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, or ARRAY.
    originalValue string
    Original value.
    value string
    Value.
    valueOperate CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperate
    VALUE processing.
    valueOperates CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperate[]
    VALUE processing chain.
    key str
    Key.
    operate str
    Operation: DATE (system preset timestamp), CUSTOMIZE (customization), MAPPING (mapping), or JSONPATH.
    scheme_type str
    Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, or ARRAY.
    original_value str
    Original value.
    value str
    Value.
    value_operate CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperate
    VALUE processing.
    value_operates Sequence[CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperate]
    VALUE processing chain.
    key String
    Key.
    operate String
    Operation: DATE (system preset timestamp), CUSTOMIZE (customization), MAPPING (mapping), or JSONPATH.
    schemeType String
    Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, or ARRAY.
    originalValue String
    Original value.
    value String
    Value.
    valueOperate Property Map
    VALUE processing.
    valueOperates List<Property Map>
    VALUE processing chain.

    CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperate, CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateArgs

    Type string
    Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (remove leading and trailing spaces), REGEX REPLACE (regular-expression replacement), URL DECODE (URL decoding), or LOWERCASE (convert to lowercase).
    Date CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateDate
    Time conversion; required when TYPE=DATE.
    JsonPathReplace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateJsonPathReplace
    JSON Path replacement; required when TYPE=JSON PATH REPLACE.
    KV CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateKV
    Key-value secondary parsing; required when TYPE=KV.
    RegexReplace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateRegexReplace
    Regular-expression replacement; required when TYPE=REGEX REPLACE.
    Replace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateReplace
    Replacement; required when TYPE=REPLACE.
    Result string
    Result.
    Split CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSplit
    Splits a single value into multiple values; required when TYPE=SPLIT.
    Substr CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSubstr
    Substring extraction; required when TYPE=SUBSTR.
    UrlDecode CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateUrlDecode
    URL decoding.
    Type string
    Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (remove leading and trailing spaces), REGEX REPLACE (regular-expression replacement), URL DECODE (URL decoding), or LOWERCASE (convert to lowercase).
    Date CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateDate
    Time conversion; required when TYPE=DATE.
    JsonPathReplace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateJsonPathReplace
    JSON Path replacement; required when TYPE=JSON PATH REPLACE.
    KV CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateKV
    Key-value secondary parsing; required when TYPE=KV.
    RegexReplace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateRegexReplace
    Regular-expression replacement; required when TYPE=REGEX REPLACE.
    Replace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateReplace
    Replacement; required when TYPE=REPLACE.
    Result string
    Result.
    Split CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSplit
    Splits a single value into multiple values; required when TYPE=SPLIT.
    Substr CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSubstr
    Substring extraction; required when TYPE=SUBSTR.
    UrlDecode CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateUrlDecode
    URL decoding.
    type String
    Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (remove leading and trailing spaces), REGEX REPLACE (regular-expression replacement), URL DECODE (URL decoding), or LOWERCASE (convert to lowercase).
    date CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateDate
    Time conversion; required when TYPE=DATE.
    jsonPathReplace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateJsonPathReplace
    JSON Path replacement; required when TYPE=JSON PATH REPLACE.
    kV CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateKV
    Key-value secondary parsing; required when TYPE=KV.
    regexReplace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateRegexReplace
    Regular-expression replacement; required when TYPE=REGEX REPLACE.
    replace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateReplace
    Replacement; required when TYPE=REPLACE.
    result String
    Result.
    split CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSplit
    Splits a single value into multiple values; required when TYPE=SPLIT.
    substr CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSubstr
    Substring extraction; required when TYPE=SUBSTR.
    urlDecode CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateUrlDecode
    URL decoding.
    type string
    Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (remove leading and trailing spaces), REGEX REPLACE (regular-expression replacement), URL DECODE (URL decoding), or LOWERCASE (convert to lowercase).
    date CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateDate
    Time conversion; required when TYPE=DATE.
    jsonPathReplace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateJsonPathReplace
    JSON Path replacement; required when TYPE=JSON PATH REPLACE.
    kV CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateKV
    Key-value secondary parsing; required when TYPE=KV.
    regexReplace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateRegexReplace
    Regular-expression replacement; required when TYPE=REGEX REPLACE.
    replace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateReplace
    Replacement; required when TYPE=REPLACE.
    result string
    Result.
    split CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSplit
    Splits a single value into multiple values; required when TYPE=SPLIT.
    substr CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSubstr
    Substring extraction; required when TYPE=SUBSTR.
    urlDecode CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateUrlDecode
    URL decoding.
    type str
    Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (remove leading and trailing spaces), REGEX REPLACE (regular-expression replacement), URL DECODE (URL decoding), or LOWERCASE (convert to lowercase).
    date CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateDate
    Time conversion; required when TYPE=DATE.
    json_path_replace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateJsonPathReplace
    JSON Path replacement; required when TYPE=JSON PATH REPLACE.
    k_v CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateKV
    Key-value secondary parsing; required when TYPE=KV.
    regex_replace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateRegexReplace
    Regular-expression replacement; required when TYPE=REGEX REPLACE.
    replace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateReplace
    Replacement; required when TYPE=REPLACE.
    result str
    Result.
    split CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSplit
    Splits a single value into multiple values; required when TYPE=SPLIT.
    substr CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSubstr
    Substring extraction; required when TYPE=SUBSTR.
    url_decode CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateUrlDecode
    URL decoding.
    type String
    Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (remove leading and trailing spaces), REGEX REPLACE (regular-expression replacement), URL DECODE (URL decoding), or LOWERCASE (convert to lowercase).
    date Property Map
    Time conversion; required when TYPE=DATE.
    jsonPathReplace Property Map
    JSON Path replacement; required when TYPE=JSON PATH REPLACE.
    kV Property Map
    Key-value secondary parsing; required when TYPE=KV.
    regexReplace Property Map
    Regular-expression replacement; required when TYPE=REGEX REPLACE.
    replace Property Map
    Replacement; required when TYPE=REPLACE.
    result String
    Result.
    split Property Map
    Splits a single value into multiple values; required when TYPE=SPLIT.
    substr Property Map
    Substring extraction; required when TYPE=SUBSTR.
    urlDecode Property Map
    URL decoding.
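
    For orientation, here is a minimal TypeScript sketch of a valueOperates chain built from these steps. Property names follow the TypeScript variant above; the chosen steps and values are purely illustrative.

    // Hypothetical valueOperates chain: each entry names a processing mode
    // via `type` and, where the mode needs one, carries a matching config block.
    const valueOperates = [
        { type: "TRIM" },      // remove leading and trailing spaces
        { type: "LOWERCASE" }, // convert to lowercase
    ];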

    CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateDate, CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateDateArgs

    Format string
    Time format.
    TargetType string
    Input type: string or unix.
    TimeZone string
    Time zone; defaults to GMT+8.
    Format string
    Time format.
    TargetType string
    Input type: string or unix.
    TimeZone string
    Time zone; defaults to GMT+8.
    format String
    Time format.
    targetType String
    Input type: string or unix.
    timeZone String
    Time zone; defaults to GMT+8.
    format string
    Time format.
    targetType string
    Input type: string or unix.
    timeZone string
    Time zone; defaults to GMT+8.
    format str
    Time format.
    target_type str
    Input type: string or unix.
    time_zone str
    Time zone; defaults to GMT+8.
    format String
    Time format.
    targetType String
    Input type: string or unix.
    timeZone String
    Time zone; defaults to GMT+8.
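
    A minimal sketch of a DATE conversion step using these fields; the format pattern and zone are illustrative assumptions.

    const dateStep = {
        type: "DATE",
        date: {
            format: "yyyy-MM-dd HH:mm:ss", // time format (illustrative pattern)
            targetType: "unix",            // input type: string or unix
            timeZone: "GMT+8",             // defaults to GMT+8 when omitted
        },
    };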

    CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateJsonPathReplace, CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateJsonPathReplaceArgs

    NewValue string
    Replacement value; a JSONPath expression or a string.
    OldValue string
    Value to be replaced; a JSONPath expression.
    NewValue string
    Replacement value; a JSONPath expression or a string.
    OldValue string
    Value to be replaced; a JSONPath expression.
    newValue String
    Replacement value; a JSONPath expression or a string.
    oldValue String
    Value to be replaced; a JSONPath expression.
    newValue string
    Replacement value; a JSONPath expression or a string.
    oldValue string
    Value to be replaced; a JSONPath expression.
    new_value str
    Replacement value; a JSONPath expression or a string.
    old_value str
    Value to be replaced; a JSONPath expression.
    newValue String
    Replacement value; a JSONPath expression or a string.
    oldValue String
    Value to be replaced; a JSONPath expression.
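
    A minimal sketch of a JSON PATH REPLACE step; the paths shown are hypothetical.

    const jsonPathReplaceStep = {
        type: "JSON PATH REPLACE",
        jsonPathReplace: {
            oldValue: "$.payload.ip", // JSONPath of the value to replace
            newValue: "masked",       // JSONPath expression or plain string
        },
    };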

    CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateKV, CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateKVArgs

    Delimiter string
    Delimiter.
    Regex string
    Key-value secondary parsing delimiter.
    KeepOriginalKey string
    Whether to keep the source key; defaults to false (do not keep).
    Delimiter string
    Delimiter.
    Regex string
    Key-value secondary parsing delimiter.
    KeepOriginalKey string
    Whether to keep the source key; defaults to false (do not keep).
    delimiter String
    Delimiter.
    regex String
    Key-value secondary parsing delimiter.
    keepOriginalKey String
    Whether to keep the source key; defaults to false (do not keep).
    delimiter string
    Delimiter.
    regex string
    Key-value secondary parsing delimiter.
    keepOriginalKey string
    Whether to keep the source key; defaults to false (do not keep).
    delimiter str
    Delimiter.
    regex str
    Key-value secondary parsing delimiter.
    keep_original_key str
    Whether to keep the source key; defaults to false (do not keep).
    delimiter String
    Delimiter.
    regex String
    Key-value secondary parsing delimiter.
    keepOriginalKey String
    Whether to keep the source key; defaults to false (do not keep).
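
    A minimal sketch of a KV step that parses "k1=v1&k2=v2"-style values; the delimiters are illustrative.

    const kvStep = {
        type: "KV",
        kV: {
            delimiter: "&",           // separates one key-value pair from the next
            regex: "=",               // splits the key from the value within a pair
            keepOriginalKey: "false", // a string flag; "false" drops the source key
        },
    };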

    CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateRegexReplace, CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateRegexReplaceArgs

    NewValue string
    New value.
    Regex string
    Regular expression.
    NewValue string
    New value.
    Regex string
    Regular expression.
    newValue String
    New value.
    regex String
    Regular expression.
    newValue string
    New value.
    regex string
    Regular expression.
    new_value str
    New value.
    regex str
    Regular expression.
    newValue String
    New value.
    regex String
    Regular expression.
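
    A minimal sketch of a REGEX REPLACE step; the pattern and replacement are illustrative.

    const regexReplaceStep = {
        type: "REGEX REPLACE",
        regexReplace: {
            regex: "\\s+", // regular expression to match
            newValue: "-", // value substituted for each match
        },
    };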

    CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateReplace, CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateReplaceArgs

    NewValue string
    New value.
    OldValue string
    Value to be replaced.
    NewValue string
    New value.
    OldValue string
    Value to be replaced.
    newValue String
    New value.
    oldValue String
    Value to be replaced.
    newValue string
    New value.
    oldValue string
    Value to be replaced.
    new_value str
    New value.
    old_value str
    Value to be replaced.
    newValue String
    New value.
    oldValue String
    Value to be replaced.
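
    A minimal sketch of a REPLACE step with illustrative values.

    const replaceStep = {
        type: "REPLACE",
        replace: {
            oldValue: "ERROR", // value to be replaced
            newValue: "WARN",  // new value
        },
    };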

    CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSplit, CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSplitArgs

    Regex string
    Delimiter.
    Regex string
    Delimiter.
    regex String
    Delimiter.
    regex string
    Delimiter.
    regex str
    Delimiter.
    regex String
    Delimiter.
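
    A minimal sketch of a SPLIT step using a comma as the illustrative delimiter.

    const splitStep = {
        type: "SPLIT",
        split: {
            regex: ",", // delimiter used to split one value into several
        },
    };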

    CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSubstr, CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSubstrArgs

    End double
    End position of the substring.
    Start double
    Start position of the substring.
    End float64
    End position of the substring.
    Start float64
    Start position of the substring.
    end Double
    End position of the substring.
    start Double
    Start position of the substring.
    end number
    End position of the substring.
    start number
    Start position of the substring.
    end float
    End position of the substring.
    start float
    Start position of the substring.
    end Number
    End position of the substring.
    start Number
    Start position of the substring.
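
    A minimal sketch of a SUBSTR step; the positions are illustrative.

    const substrStep = {
        type: "SUBSTR",
        substr: {
            start: 0, // start position of the substring
            end: 8,   // end (cut-off) position of the substring
        },
    };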

    CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateUrlDecode, CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateUrlDecodeArgs

    CharsetName string
    Character set (encoding) name.
    CharsetName string
    Character set (encoding) name.
    charsetName String
    Character set (encoding) name.
    charsetName string
    Character set (encoding) name.
    charset_name str
    Character set (encoding) name.
    charsetName String
    Character set (encoding) name.
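
    A minimal sketch of a URL DECODE step, assuming UTF-8 as the charset.

    const urlDecodeStep = {
        type: "URL DECODE",
        urlDecode: {
            charsetName: "UTF-8", // character set used when decoding
        },
    };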

    CkafkaDatahubTaskTransformsParamFieldChainSMT, CkafkaDatahubTaskTransformsParamFieldChainSMTArgs

    Key string
    Key.
    Operate string
    Operation: DATE (system-preset timestamp), CUSTOMIZE (custom value), MAPPING (mapping), or JSONPATH.
    SchemeType string
    Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, or ARRAY.
    OriginalValue string
    Original value.
    Value string
    Value.
    ValueOperate CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperate
    VALUE processing.
    ValueOperates List<CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperate>
    VALUE processing chain.
    Key string
    Key.
    Operate string
    Operation: DATE (system-preset timestamp), CUSTOMIZE (custom value), MAPPING (mapping), or JSONPATH.
    SchemeType string
    Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, or ARRAY.
    OriginalValue string
    Original value.
    Value string
    Value.
    ValueOperate CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperate
    VALUE processing.
    ValueOperates []CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperate
    VALUE processing chain.
    key String
    Key.
    operate String
    Operation: DATE (system-preset timestamp), CUSTOMIZE (custom value), MAPPING (mapping), or JSONPATH.
    schemeType String
    Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, or ARRAY.
    originalValue String
    Original value.
    value String
    Value.
    valueOperate CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperate
    VALUE processing.
    valueOperates List<CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperate>
    VALUE processing chain.
    key string
    Key.
    operate string
    Operation: DATE (system-preset timestamp), CUSTOMIZE (custom value), MAPPING (mapping), or JSONPATH.
    schemeType string
    Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, or ARRAY.
    originalValue string
    Original value.
    value string
    Value.
    valueOperate CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperate
    VALUE processing.
    valueOperates CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperate[]
    VALUE processing chain.
    key str
    Key.
    operate str
    Operation: DATE (system-preset timestamp), CUSTOMIZE (custom value), MAPPING (mapping), or JSONPATH.
    scheme_type str
    Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, or ARRAY.
    original_value str
    Original value.
    value str
    Value.
    value_operate CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperate
    VALUE processing.
    value_operates Sequence[CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperate]
    VALUE processing chain.
    key String
    Key.
    operate String
    Operation: DATE (system-preset timestamp), CUSTOMIZE (custom value), MAPPING (mapping), or JSONPATH.
    schemeType String
    Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, or ARRAY.
    originalValue String
    Original value.
    value String
    Value.
    valueOperate Property Map
    VALUE processing.
    valueOperates List<Property Map>
    VALUE processing chain.
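
    For orientation, a minimal TypeScript sketch of a single SMT field entry; the field name and processing steps are illustrative.

    const smtField = {
        key: "client_ip",     // field KEY
        operate: "CUSTOMIZE", // DATE, CUSTOMIZE, MAPPING, or JSONPATH
        schemeType: "STRING", // ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, ARRAY
        valueOperates: [
            { type: "TRIM" }, // chained VALUE processing steps
        ],
    };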

    CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperate, CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateArgs

    Type string
    Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (remove leading and trailing spaces), REGEX REPLACE (regular-expression replacement), URL DECODE (URL decoding), or LOWERCASE (convert to lowercase).
    Date CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateDate
    Time conversion; required when TYPE=DATE.
    JsonPathReplace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateJsonPathReplace
    JSON Path replacement; required when TYPE=JSON PATH REPLACE.
    KV CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateKV
    Key-value secondary parsing; required when TYPE=KV.
    RegexReplace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateRegexReplace
    Regular-expression replacement; required when TYPE=REGEX REPLACE.
    Replace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateReplace
    Replacement; required when TYPE=REPLACE.
    Result string
    Result.
    Split CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSplit
    Splits a single value into multiple values; required when TYPE=SPLIT.
    Substr CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSubstr
    Substring extraction; required when TYPE=SUBSTR.
    UrlDecode CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateUrlDecode
    URL decoding.
    Type string
    Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (remove leading and trailing spaces), REGEX REPLACE (regular-expression replacement), URL DECODE (URL decoding), or LOWERCASE (convert to lowercase).
    Date CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateDate
    Time conversion; required when TYPE=DATE.
    JsonPathReplace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateJsonPathReplace
    JSON Path replacement; required when TYPE=JSON PATH REPLACE.
    KV CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateKV
    Key-value secondary parsing; required when TYPE=KV.
    RegexReplace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateRegexReplace
    Regular-expression replacement; required when TYPE=REGEX REPLACE.
    Replace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateReplace
    Replacement; required when TYPE=REPLACE.
    Result string
    Result.
    Split CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSplit
    Splits a single value into multiple values; required when TYPE=SPLIT.
    Substr CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSubstr
    Substring extraction; required when TYPE=SUBSTR.
    UrlDecode CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateUrlDecode
    URL decoding.
    type String
    Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (remove leading and trailing spaces), REGEX REPLACE (regular-expression replacement), URL DECODE (URL decoding), or LOWERCASE (convert to lowercase).
    date CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateDate
    Time conversion; required when TYPE=DATE.
    jsonPathReplace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateJsonPathReplace
    JSON Path replacement; required when TYPE=JSON PATH REPLACE.
    kV CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateKV
    Key-value secondary parsing; required when TYPE=KV.
    regexReplace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateRegexReplace
    Regular-expression replacement; required when TYPE=REGEX REPLACE.
    replace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateReplace
    Replacement; required when TYPE=REPLACE.
    result String
    Result.
    split CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSplit
    Splits a single value into multiple values; required when TYPE=SPLIT.
    substr CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSubstr
    Substring extraction; required when TYPE=SUBSTR.
    urlDecode CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateUrlDecode
    URL decoding.
    type string
    Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (remove leading and trailing spaces), REGEX REPLACE (regular-expression replacement), URL DECODE (URL decoding), or LOWERCASE (convert to lowercase).
    date CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateDate
    Time conversion; required when TYPE=DATE.
    jsonPathReplace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateJsonPathReplace
    JSON Path replacement; required when TYPE=JSON PATH REPLACE.
    kV CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateKV
    Key-value secondary parsing; required when TYPE=KV.
    regexReplace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateRegexReplace
    Regular-expression replacement; required when TYPE=REGEX REPLACE.
    replace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateReplace
    Replacement; required when TYPE=REPLACE.
    result string
    Result.
    split CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSplit
    Splits a single value into multiple values; required when TYPE=SPLIT.
    substr CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSubstr
    Substring extraction; required when TYPE=SUBSTR.
    urlDecode CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateUrlDecode
    URL decoding.
    type str
    Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (remove leading and trailing spaces), REGEX REPLACE (regular-expression replacement), URL DECODE (URL decoding), or LOWERCASE (convert to lowercase).
    date CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateDate
    Time conversion; required when TYPE=DATE.
    json_path_replace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateJsonPathReplace
    JSON Path replacement; required when TYPE=JSON PATH REPLACE.
    k_v CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateKV
    Key-value secondary parsing; required when TYPE=KV.
    regex_replace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateRegexReplace
    Regular-expression replacement; required when TYPE=REGEX REPLACE.
    replace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateReplace
    Replacement; required when TYPE=REPLACE.
    result str
    Result.
    split CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSplit
    Splits a single value into multiple values; required when TYPE=SPLIT.
    substr CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSubstr
    Substring extraction; required when TYPE=SUBSTR.
    url_decode CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateUrlDecode
    URL decoding.
    type String
    Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (remove leading and trailing spaces), REGEX REPLACE (regular-expression replacement), URL DECODE (URL decoding), or LOWERCASE (convert to lowercase).
    date Property Map
    Time conversion; required when TYPE=DATE.
    jsonPathReplace Property Map
    JSON Path replacement; required when TYPE=JSON PATH REPLACE.
    kV Property Map
    Key-value secondary parsing; required when TYPE=KV.
    regexReplace Property Map
    Regular-expression replacement; required when TYPE=REGEX REPLACE.
    replace Property Map
    Replacement; required when TYPE=REPLACE.
    result String
    Result.
    split Property Map
    Splits a single value into multiple values; required when TYPE=SPLIT.
    substr Property Map
    Substring extraction; required when TYPE=SUBSTR.
    urlDecode Property Map
    URL decoding.

    CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateDate, CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateDateArgs

    Format string
    Time format.
    TargetType string
    Input type: string or unix.
    TimeZone string
    Time zone; defaults to GMT+8.
    Format string
    Time format.
    TargetType string
    Input type: string or unix.
    TimeZone string
    Time zone; defaults to GMT+8.
    format String
    Time format.
    targetType String
    Input type: string or unix.
    timeZone String
    Time zone; defaults to GMT+8.
    format string
    Time format.
    targetType string
    Input type: string or unix.
    timeZone string
    Time zone; defaults to GMT+8.
    format str
    Time format.
    target_type str
    Input type: string or unix.
    time_zone str
    Time zone; defaults to GMT+8.
    format String
    Time format.
    targetType String
    Input type: string or unix.
    timeZone String
    Time zone; defaults to GMT+8.

    CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateJsonPathReplace, CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateJsonPathReplaceArgs

    NewValue string
    Replacement value; a JSONPath expression or a string.
    OldValue string
    Value to be replaced; a JSONPath expression.
    NewValue string
    Replacement value; a JSONPath expression or a string.
    OldValue string
    Value to be replaced; a JSONPath expression.
    newValue String
    Replacement value; a JSONPath expression or a string.
    oldValue String
    Value to be replaced; a JSONPath expression.
    newValue string
    Replacement value; a JSONPath expression or a string.
    oldValue string
    Value to be replaced; a JSONPath expression.
    new_value str
    Replacement value; a JSONPath expression or a string.
    old_value str
    Value to be replaced; a JSONPath expression.
    newValue String
    Replacement value; a JSONPath expression or a string.
    oldValue String
    Value to be replaced; a JSONPath expression.

    CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateKV, CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateKVArgs

    Delimiter string
    Delimiter.
    Regex string
    Key-value secondary parsing delimiter.
    KeepOriginalKey string
    Whether to keep the source key; defaults to false (do not keep).
    Delimiter string
    Delimiter.
    Regex string
    Key-value secondary parsing delimiter.
    KeepOriginalKey string
    Whether to keep the source key; defaults to false (do not keep).
    delimiter String
    Delimiter.
    regex String
    Key-value secondary parsing delimiter.
    keepOriginalKey String
    Whether to keep the source key; defaults to false (do not keep).
    delimiter string
    Delimiter.
    regex string
    Key-value secondary parsing delimiter.
    keepOriginalKey string
    Whether to keep the source key; defaults to false (do not keep).
    delimiter str
    Delimiter.
    regex str
    Key-value secondary parsing delimiter.
    keep_original_key str
    Whether to keep the source key; defaults to false (do not keep).
    delimiter String
    Delimiter.
    regex String
    Key-value secondary parsing delimiter.
    keepOriginalKey String
    Whether to keep the source key; defaults to false (do not keep).

    CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateRegexReplace, CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateRegexReplaceArgs

    NewValue string
    New value.
    Regex string
    Regular expression.
    NewValue string
    New value.
    Regex string
    Regular expression.
    newValue String
    New value.
    regex String
    Regular expression.
    newValue string
    New value.
    regex string
    Regular expression.
    new_value str
    New value.
    regex str
    Regular expression.
    newValue String
    New value.
    regex String
    Regular expression.

    CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateReplace, CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateReplaceArgs

    NewValue string
    New value.
    OldValue string
    Value to be replaced.
    NewValue string
    New value.
    OldValue string
    Value to be replaced.
    newValue String
    New value.
    oldValue String
    Value to be replaced.
    newValue string
    New value.
    oldValue string
    Value to be replaced.
    new_value str
    New value.
    old_value str
    Value to be replaced.
    newValue String
    New value.
    oldValue String
    Value to be replaced.

    CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSplit, CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSplitArgs

    Regex string
    Delimiter.
    Regex string
    Delimiter.
    regex String
    Delimiter.
    regex string
    Delimiter.
    regex str
    Delimiter.
    regex String
    Delimiter.

    CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSubstr, CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSubstrArgs

    End double
    End position of the substring.
    Start double
    Start position of the substring.
    End float64
    End position of the substring.
    Start float64
    Start position of the substring.
    end Double
    End position of the substring.
    start Double
    Start position of the substring.
    end number
    End position of the substring.
    start number
    Start position of the substring.
    end float
    End position of the substring.
    start float
    Start position of the substring.
    end Number
    End position of the substring.
    start Number
    Start position of the substring.

    CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateUrlDecode, CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateUrlDecodeArgs

    CharsetName string
    Character set (encoding) name.
    CharsetName string
    Character set (encoding) name.
    charsetName String
    Character set (encoding) name.
    charsetName string
    Character set (encoding) name.
    charset_name str
    Character set (encoding) name.
    charsetName String
    Character set (encoding) name.

    CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyse, CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseArgs

    Regex string
    Delimiter.
    Regex string
    Delimiter.
    regex String
    Delimiter.
    regex string
    Delimiter.
    regex str
    Delimiter.
    regex String
    Delimiter.

    CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResult, CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultArgs

    Key string
    Key.
    Operate string
    Operation: DATE (system-preset timestamp), CUSTOMIZE (custom value), MAPPING (mapping), or JSONPATH.
    SchemeType string
    Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, or ARRAY.
    OriginalValue string
    Original value.
    Value string
    Value.
    ValueOperate CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperate
    VALUE processing.
    ValueOperates List<CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperate>
    VALUE processing chain.
    Key string
    Key.
    Operate string
    Operation: DATE (system-preset timestamp), CUSTOMIZE (custom value), MAPPING (mapping), or JSONPATH.
    SchemeType string
    Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, or ARRAY.
    OriginalValue string
    Original value.
    Value string
    Value.
    ValueOperate CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperate
    VALUE processing.
    ValueOperates []CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperate
    VALUE processing chain.
    key String
    Key.
    operate String
    Operation: DATE (system-preset timestamp), CUSTOMIZE (custom value), MAPPING (mapping), or JSONPATH.
    schemeType String
    Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, or ARRAY.
    originalValue String
    Original value.
    value String
    Value.
    valueOperate CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperate
    VALUE processing.
    valueOperates List<CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperate>
    VALUE processing chain.
    key string
    Key.
    operate string
    Operation: DATE (system-preset timestamp), CUSTOMIZE (custom value), MAPPING (mapping), or JSONPATH.
    schemeType string
    Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, or ARRAY.
    originalValue string
    Original value.
    value string
    Value.
    valueOperate CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperate
    VALUE processing.
    valueOperates CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperate[]
    VALUE processing chain.
    key str
    Key.
    operate str
    Operation: DATE (system-preset timestamp), CUSTOMIZE (custom value), MAPPING (mapping), or JSONPATH.
    scheme_type str
    Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, or ARRAY.
    original_value str
    Original value.
    value str
    Value.
    value_operate CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperate
    VALUE processing.
    value_operates Sequence[CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperate]
    VALUE processing chain.
    key String
    Key.
    operate String
    Operation: DATE (system-preset timestamp), CUSTOMIZE (custom value), MAPPING (mapping), or JSONPATH.
    schemeType String
    Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, or ARRAY.
    originalValue String
    Original value.
    value String
    Value.
    valueOperate Property Map
    VALUE processing.
    valueOperates List<Property Map>
    VALUE processing chain.

    CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperate, CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateArgs

    Type string
    Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (remove leading and trailing spaces), REGEX REPLACE (regular-expression replacement), URL DECODE (URL decoding), or LOWERCASE (convert to lowercase).
    Date CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateDate
    Time conversion; required when TYPE=DATE.
    JsonPathReplace CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateJsonPathReplace
    JSON Path replacement; required when TYPE=JSON PATH REPLACE.
    KV CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateKV
    Key-value secondary parsing; required when TYPE=KV.
    RegexReplace CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateRegexReplace
    Regular-expression replacement; required when TYPE=REGEX REPLACE.
    Replace CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateReplace
    Replacement; required when TYPE=REPLACE.
    Result string
    Result.
    Split CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateSplit
    Splits a single value into multiple values; required when TYPE=SPLIT.
    Substr CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateSubstr
    Substring extraction; required when TYPE=SUBSTR.
    UrlDecode CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateUrlDecode
    URL decoding.
    Type string
    Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (remove leading and trailing spaces), REGEX REPLACE (regular-expression replacement), URL DECODE (URL decoding), or LOWERCASE (convert to lowercase).
    Date CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateDate
    Time conversion; required when TYPE=DATE.
    JsonPathReplace CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateJsonPathReplace
    JSON Path replacement; required when TYPE=JSON PATH REPLACE.
    KV CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateKV
    Key-value secondary parsing; required when TYPE=KV.
    RegexReplace CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateRegexReplace
    Regular-expression replacement; required when TYPE=REGEX REPLACE.
    Replace CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateReplace
    Replacement; required when TYPE=REPLACE.
    Result string
    Result.
    Split CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateSplit
    Splits a single value into multiple values; required when TYPE=SPLIT.
    Substr CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateSubstr
    Substring extraction; required when TYPE=SUBSTR.
    UrlDecode CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateUrlDecode
    URL decoding.
    type String
    Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (remove leading and trailing spaces), REGEX REPLACE (regular-expression replacement), URL DECODE (URL decoding), or LOWERCASE (convert to lowercase).
    date CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateDate
    Time conversion; required when TYPE=DATE.
    jsonPathReplace CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateJsonPathReplace
    JSON Path replacement; required when TYPE=JSON PATH REPLACE.
    kV CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateKV
    Key-value secondary parsing; required when TYPE=KV.
    regexReplace CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateRegexReplace
    Regular-expression replacement; required when TYPE=REGEX REPLACE.
    replace CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateReplace
    Replacement; required when TYPE=REPLACE.
    result String
    Result.
    split CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateSplit
    Splits a single value into multiple values; required when TYPE=SPLIT.
    substr CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateSubstr
    Substring extraction; required when TYPE=SUBSTR.
    urlDecode CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateUrlDecode
    URL decoding.
    type string
    Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (remove leading and trailing spaces), REGEX REPLACE (regular-expression replacement), URL DECODE (URL decoding), or LOWERCASE (convert to lowercase).
    date CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateDate
    Time conversion; required when TYPE=DATE.
    jsonPathReplace CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateJsonPathReplace
    JSON Path replacement; required when TYPE=JSON PATH REPLACE.
    kV CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateKV
    Key-value secondary parsing; required when TYPE=KV.
    regexReplace CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateRegexReplace
    Regular-expression replacement; required when TYPE=REGEX REPLACE.
    replace CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateReplace
    Replacement; required when TYPE=REPLACE.
    result string
    Result.
    split CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateSplit
    Splits a single value into multiple values; required when TYPE=SPLIT.
    substr CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateSubstr
    Substring extraction; required when TYPE=SUBSTR.
    urlDecode CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateUrlDecode
    URL decoding.
    type str
    Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (remove leading and trailing spaces), REGEX REPLACE (regular-expression replacement), URL DECODE (URL decoding), or LOWERCASE (convert to lowercase).
    date CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateDate
    Time conversion; required when TYPE=DATE.
    json_path_replace CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateJsonPathReplace
    JSON Path replacement; required when TYPE=JSON PATH REPLACE.
    k_v CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateKV
    Key-value secondary parsing; required when TYPE=KV.
    regex_replace CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateRegexReplace
    Regular-expression replacement; required when TYPE=REGEX REPLACE.
    replace CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateReplace
    Replacement; required when TYPE=REPLACE.
    result str
    Result.
    split CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateSplit
    Splits a single value into multiple values; required when TYPE=SPLIT.
    substr CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateSubstr
    Substring extraction; required when TYPE=SUBSTR.
    url_decode CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateUrlDecode
    URL decoding.
    type String
    Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (remove leading and trailing spaces), REGEX REPLACE (regular-expression replacement), URL DECODE (URL decoding), or LOWERCASE (convert to lowercase).
    date Property Map
    Time conversion; required when TYPE=DATE.
    jsonPathReplace Property Map
    JSON Path replacement; required when TYPE=JSON PATH REPLACE.
    kV Property Map
    Key-value secondary parsing; required when TYPE=KV.
    regexReplace Property Map
    Regular-expression replacement; required when TYPE=REGEX REPLACE.
    replace Property Map
    Replacement; required when TYPE=REPLACE.
    result String
    Result.
    split Property Map
    Splits a single value into multiple values; required when TYPE=SPLIT.
    substr Property Map
    Substring extraction; required when TYPE=SUBSTR.
    urlDecode Property Map
    URL decoding.

    CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateDate, CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateDateArgs

    Format string
    Time format.
    TargetType string
    Input type: string or unix.
    TimeZone string
    Time zone; defaults to GMT+8.
    Format string
    Time format.
    TargetType string
    Input type: string or unix.
    TimeZone string
    Time zone; defaults to GMT+8.
    format String
    Time format.
    targetType String
    Input type: string or unix.
    timeZone String
    Time zone; defaults to GMT+8.
    format string
    Time format.
    targetType string
    Input type: string or unix.
    timeZone string
    Time zone; defaults to GMT+8.
    format str
    Time format.
    target_type str
    Input type: string or unix.
    time_zone str
    Time zone; defaults to GMT+8.
    format String
    Time format.
    targetType String
    Input type: string or unix.
    timeZone String
    Time zone; defaults to GMT+8.

    CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateJsonPathReplace, CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateJsonPathReplaceArgs

    NewValue string
    Replacement value; a JSONPath expression or a string.
    OldValue string
    Value to be replaced; a JSONPath expression.
    NewValue string
    Replacement value; a JSONPath expression or a string.
    OldValue string
    Value to be replaced; a JSONPath expression.
    newValue String
    Replacement value; a JSONPath expression or a string.
    oldValue String
    Value to be replaced; a JSONPath expression.
    newValue string
    Replacement value; a JSONPath expression or a string.
    oldValue string
    Value to be replaced; a JSONPath expression.
    new_value str
    Replacement value; a JSONPath expression or a string.
    old_value str
    Value to be replaced; a JSONPath expression.
    newValue String
    Replacement value; a JSONPath expression or a string.
    oldValue String
    Value to be replaced; a JSONPath expression.

    CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateKV, CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateKVArgs

    Delimiter string
    Delimiter.
    Regex string
    Key-value secondary parsing delimiter.
    KeepOriginalKey string
    Whether to keep the source key; defaults to false (do not keep).
    Delimiter string
    Delimiter.
    Regex string
    Key-value secondary parsing delimiter.
    KeepOriginalKey string
    Whether to keep the source key; defaults to false (do not keep).
    delimiter String
    Delimiter.
    regex String
    Key-value secondary parsing delimiter.
    keepOriginalKey String
    Whether to keep the source key; defaults to false (do not keep).
    delimiter string
    Delimiter.
    regex string
    Key-value secondary parsing delimiter.
    keepOriginalKey string
    Whether to keep the source key; defaults to false (do not keep).
    delimiter str
    Delimiter.
    regex str
    Key-value secondary parsing delimiter.
    keep_original_key str
    Whether to keep the source key; defaults to false (do not keep).
    delimiter String
    Delimiter.
    regex String
    Key-value secondary parsing delimiter.
    keepOriginalKey String
    Whether to keep the source key; defaults to false (do not keep).

    CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateRegexReplace, CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateRegexReplaceArgs

    NewValue string
    New value.
    Regex string
    Regular expression.
    NewValue string
    New value.
    Regex string
    Regular expression.
    newValue String
    New value.
    regex String
    Regular expression.
    newValue string
    New value.
    regex string
    Regular expression.
    new_value str
    New value.
    regex str
    Regular expression.
    newValue String
    New value.
    regex String
    Regular expression.

    CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateReplace, CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateReplaceArgs

    NewValue string
    New value.
    OldValue string
    Value to be replaced.
    NewValue string
    New value.
    OldValue string
    Value to be replaced.
    newValue String
    New value.
    oldValue String
    Value to be replaced.
    newValue string
    New value.
    oldValue string
    Value to be replaced.
    new_value str
    New value.
    old_value str
    Value to be replaced.
    newValue String
    New value.
    oldValue String
    Value to be replaced.

    CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateSplit, CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateSplitArgs

    Regex string
    Delimiter.
    Regex string
    Delimiter.
    regex String
    Delimiter.
    regex string
    Delimiter.
    regex str
    Delimiter.
    regex String
    Delimiter.

    CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateSubstr, CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateSubstrArgs

    End double
    End position of the substring.
    Start double
    Start position of the substring.
    End float64
    End position of the substring.
    Start float64
    Start position of the substring.
    end Double
    End position of the substring.
    start Double
    Start position of the substring.
    end number
    End position of the substring.
    start number
    Start position of the substring.
    end float
    End position of the substring.
    start float
    Start position of the substring.
    end Number
    End position of the substring.
    start Number
    Start position of the substring.

    CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateUrlDecode, CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateUrlDecodeArgs

    CharsetName string
    Character set (encoding) name.
    CharsetName string
    Character set (encoding) name.
    charsetName String
    Character set (encoding) name.
    charsetName string
    Character set (encoding) name.
    charset_name str
    Character set (encoding) name.
    charsetName String
    Character set (encoding) name.

    CkafkaDatahubTaskTransformsParamFilterParam, CkafkaDatahubTaskTransformsParamFilterParamArgs

    Key string
    Key.
    MatchMode string
    Matching mode: PREFIX (prefix match), SUFFIX (suffix match), CONTAINS (contains match), EXCEPT (exclusion match), NUMBER (numeric match), or IP (IP match).
    Value string
    Value.
    Type string
    REGULAR.
    Key string
    Key.
    MatchMode string
    Matching mode: PREFIX (prefix match), SUFFIX (suffix match), CONTAINS (contains match), EXCEPT (exclusion match), NUMBER (numeric match), or IP (IP match).
    Value string
    Value.
    Type string
    REGULAR.
    key String
    Key.
    matchMode String
    Matching mode: PREFIX (prefix match), SUFFIX (suffix match), CONTAINS (contains match), EXCEPT (exclusion match), NUMBER (numeric match), or IP (IP match).
    value String
    Value.
    type String
    REGULAR.
    key string
    Key.
    matchMode string
    Matching mode: PREFIX (prefix match), SUFFIX (suffix match), CONTAINS (contains match), EXCEPT (exclusion match), NUMBER (numeric match), or IP (IP match).
    value string
    Value.
    type string
    REGULAR.
    key str
    Key.
    match_mode str
    Matching mode: PREFIX (prefix match), SUFFIX (suffix match), CONTAINS (contains match), EXCEPT (exclusion match), NUMBER (numeric match), or IP (IP match).
    value str
    Value.
    type str
    REGULAR.
    key String
    Key.
    matchMode String
    Matching mode: PREFIX (prefix match), SUFFIX (suffix match), CONTAINS (contains match), EXCEPT (exclusion match), NUMBER (numeric match), or IP (IP match).
    value String
    Value.
    type String
    REGULAR.
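
    A minimal sketch of a filter rule; the key and value are hypothetical.

    const filterParam = {
        key: "level",
        matchMode: "PREFIX", // PREFIX, SUFFIX, CONTAINS, EXCEPT, NUMBER, or IP
        value: "ERR",        // match records whose "level" starts with "ERR"
        type: "REGULAR",
    };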

    CkafkaDatahubTaskTransformsParamRowParam, CkafkaDatahubTaskTransformsParamRowParamArgs

    RowContent string
    Row content type: KEY_VALUE or VALUE.
    EntryDelimiter string
    Entry delimiter.
    KeyValueDelimiter string
    Key-value delimiter.
    RowContent string
    Row content type: KEY_VALUE or VALUE.
    EntryDelimiter string
    Entry delimiter.
    KeyValueDelimiter string
    Key-value delimiter.
    rowContent String
    Row content type: KEY_VALUE or VALUE.
    entryDelimiter String
    Entry delimiter.
    keyValueDelimiter String
    Key-value delimiter.
    rowContent string
    Row content type: KEY_VALUE or VALUE.
    entryDelimiter string
    Entry delimiter.
    keyValueDelimiter string
    Key-value delimiter.
    row_content str
    Row content type: KEY_VALUE or VALUE.
    entry_delimiter str
    Entry delimiter.
    key_value_delimiter str
    Key-value delimiter.
    rowContent String
    Row content type: KEY_VALUE or VALUE.
    entryDelimiter String
    Entry delimiter.
    keyValueDelimiter String
    Key-value delimiter.
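
    A minimal sketch of a row parameter that emits "key=value" entries joined by "&"; the delimiters are illustrative.

    const rowParam = {
        rowContent: "KEY_VALUE", // KEY_VALUE or VALUE
        entryDelimiter: "&",     // separates entries within a row
        keyValueDelimiter: "=",  // separates a key from its value
    };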

    Import

    ckafka datahub_task can be imported using the id, e.g.

    $ pulumi import tencentcloud:index/ckafkaDatahubTask:CkafkaDatahubTask datahub_task datahub_task_id
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository: tencentcloud tencentcloudstack/terraform-provider-tencentcloud
    License
    Notes: This Pulumi package is based on the tencentcloud Terraform Provider.