tencentcloud.getCkafkaDatahubTask

tencentcloud 1.81.189 published on Wednesday, Apr 30, 2025 by tencentcloudstack
    Use this data source to query detailed information of CKafka DataHub tasks.

    Example Usage

    import * as pulumi from "@pulumi/pulumi";
    import * as tencentcloud from "@pulumi/tencentcloud";
    
    const datahubTask = tencentcloud.getCkafkaDatahubTask({});
    
    import pulumi
    import pulumi_tencentcloud as tencentcloud
    
    datahub_task = tencentcloud.get_ckafka_datahub_task()
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-terraform-provider/sdks/go/tencentcloud/tencentcloud"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := tencentcloud.LookupCkafkaDatahubTask(ctx, &tencentcloud.LookupCkafkaDatahubTaskArgs{}, nil)
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Tencentcloud = Pulumi.Tencentcloud;
    
    return await Deployment.RunAsync(() => 
    {
        var datahubTask = Tencentcloud.GetCkafkaDatahubTask.Invoke();
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.tencentcloud.TencentcloudFunctions;
    import com.pulumi.tencentcloud.inputs.GetCkafkaDatahubTaskArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            final var datahubTask = TencentcloudFunctions.getCkafkaDatahubTask();
    
        }
    }
    
    variables:
      datahubTask:
        fn::invoke:
          function: tencentcloud:getCkafkaDatahubTask
          arguments: {}
    

    Using getCkafkaDatahubTask

    Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.

    function getCkafkaDatahubTask(args: GetCkafkaDatahubTaskArgs, opts?: InvokeOptions): Promise<GetCkafkaDatahubTaskResult>
    function getCkafkaDatahubTaskOutput(args: GetCkafkaDatahubTaskOutputArgs, opts?: InvokeOptions): Output<GetCkafkaDatahubTaskResult>
    def get_ckafka_datahub_task(id: Optional[str] = None,
                                resource: Optional[str] = None,
                                result_output_file: Optional[str] = None,
                                search_word: Optional[str] = None,
                                source_type: Optional[str] = None,
                                target_type: Optional[str] = None,
                                task_type: Optional[str] = None,
                                opts: Optional[InvokeOptions] = None) -> GetCkafkaDatahubTaskResult
    def get_ckafka_datahub_task_output(id: Optional[pulumi.Input[str]] = None,
                                       resource: Optional[pulumi.Input[str]] = None,
                                       result_output_file: Optional[pulumi.Input[str]] = None,
                                       search_word: Optional[pulumi.Input[str]] = None,
                                       source_type: Optional[pulumi.Input[str]] = None,
                                       target_type: Optional[pulumi.Input[str]] = None,
                                       task_type: Optional[pulumi.Input[str]] = None,
                                       opts: Optional[InvokeOptions] = None) -> Output[GetCkafkaDatahubTaskResult]
    func LookupCkafkaDatahubTask(ctx *Context, args *LookupCkafkaDatahubTaskArgs, opts ...InvokeOption) (*LookupCkafkaDatahubTaskResult, error)
    func LookupCkafkaDatahubTaskOutput(ctx *Context, args *LookupCkafkaDatahubTaskOutputArgs, opts ...InvokeOption) LookupCkafkaDatahubTaskResultOutput

    > Note: This function is named LookupCkafkaDatahubTask in the Go SDK.

    public static class GetCkafkaDatahubTask 
    {
        public static Task<GetCkafkaDatahubTaskResult> InvokeAsync(GetCkafkaDatahubTaskArgs args, InvokeOptions? opts = null)
        public static Output<GetCkafkaDatahubTaskResult> Invoke(GetCkafkaDatahubTaskInvokeArgs args, InvokeOptions? opts = null)
    }
    public static CompletableFuture<GetCkafkaDatahubTaskResult> getCkafkaDatahubTask(GetCkafkaDatahubTaskArgs args, InvokeOptions options)
    public static Output<GetCkafkaDatahubTaskResult> getCkafkaDatahubTask(GetCkafkaDatahubTaskArgs args, InvokeOptions options)
    
    fn::invoke:
      function: tencentcloud:index/getCkafkaDatahubTask:getCkafkaDatahubTask
      arguments:
        # arguments dictionary
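
    In the output form, arguments may be Input-wrapped and the result stays Output-wrapped. A minimal TypeScript sketch (the filter value shown is hypothetical):

    import * as pulumi from "@pulumi/pulumi";
    import * as tencentcloud from "@pulumi/tencentcloud";
    
    // Hypothetical filter; any pulumi.Input<string> works in the output form.
    const taskType = pulumi.output("SOURCE");
    
    const tasks = tencentcloud.getCkafkaDatahubTaskOutput({
        taskType: taskType,
    });
    
    // Downstream values remain lazily evaluated Outputs.
    export const taskCount = tasks.taskLists.apply(lists => lists.length);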

    The following arguments are supported:

    Id string
    Resource string
    Resource.
    ResultOutputFile string
    Used to save results.
    SearchWord string
    Search keyword.
    SourceType string
    The source type.
    TargetType string
    Destination type of dump.
    TaskType string
    Task type, SOURCE|SINK.
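
    All of these arguments are optional filters. In the direct form they are passed as a plain object; a short TypeScript sketch (the filter values are hypothetical):

    import * as tencentcloud from "@pulumi/tencentcloud";
    
    async function listSinkTasks() {
        // Direct form: plain arguments, Promise-wrapped result.
        const result = await tencentcloud.getCkafkaDatahubTask({
            taskType: "SINK",      // Task type, SOURCE|SINK
            searchWord: "my-task", // hypothetical search keyword
        });
        return result.taskLists;
    }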

    getCkafkaDatahubTask Result

    The following output properties are available:

    Id string
    TaskLists List<GetCkafkaDatahubTaskTaskList>
    Datahub task information list.
    Resource string
    The name of the separately sold topic.
    ResultOutputFile string
    SearchWord string
    SourceType string
    TargetType string
    TaskType string
    Task type, SOURCE|SINK.
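
    Each entry in taskLists is a GetCkafkaDatahubTaskTaskList, documented under Supporting Types below. A short TypeScript sketch of projecting fields out of the Output-wrapped result:

    import * as tencentcloud from "@pulumi/tencentcloud";
    
    const tasks = tencentcloud.getCkafkaDatahubTaskOutput({});
    
    // Summarize each datahub task from the nested task list.
    export const taskSummaries = tasks.taskLists.apply(lists =>
        lists.map(t => ({ id: t.taskId, name: t.taskName, status: t.status })),
    );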

    Supporting Types

    GetCkafkaDatahubTaskTaskList

    CreateTime string
    Create time.
    DatahubId string
    Datahub ID.
    ErrorMessage string
    Error message.
    SourceResources List<GetCkafkaDatahubTaskTaskListSourceResource>
    Data resource.
    Status double
    Status: -1 creation failed, 0 creating, 1 running, 2 deleting, 3 deleted, 4 deletion failed, 5 pausing, 6 paused, 7 pause failed, 8 resuming, 9 resume failed.
    StepLists List<string>
    Step list.
    TargetResources List<GetCkafkaDatahubTaskTaskListTargetResource>
    Target resource.
    TaskCurrentStep string
    Current task step.
    TaskId string
    Task ID.
    TaskName string
    Task name.
    TaskProgress double
    Creation progress percentage.
    TaskType string
    Task type, SOURCE|SINK.
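
    The numeric status field follows the enumeration above; a small hypothetical TypeScript helper to render it as a label:

    // Labels derived from the status enumeration documented above.
    const statusLabels: Record<number, string> = {
        [-1]: "creation failed",
        0: "creating",
        1: "running",
        2: "deleting",
        3: "deleted",
        4: "deletion failed",
        5: "pausing",
        6: "paused",
        7: "pause failed",
        8: "resuming",
        9: "resume failed",
    };
    
    export function statusLabel(status: number): string {
        return statusLabels[status] ?? `unknown (${status})`;
    }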

    GetCkafkaDatahubTaskTaskListSourceResource

    ClickHouseParams List<GetCkafkaDatahubTaskTaskListSourceResourceClickHouseParam>
    ClickHouse configuration, required when Type is CLICKHOUSE.
    ClsParams List<GetCkafkaDatahubTaskTaskListSourceResourceClsParam>
    CLS configuration, required when Type is CLS.
    CosParams List<GetCkafkaDatahubTaskTaskListSourceResourceCosParam>
    COS configuration, required when Type is COS.
    CtsdbParams List<GetCkafkaDatahubTaskTaskListSourceResourceCtsdbParam>
    CTSDB configuration, required when Type is CTSDB.
    DtsParams List<GetCkafkaDatahubTaskTaskListSourceResourceDtsParam>
    DTS configuration, required when Type is DTS.
    EsParams List<GetCkafkaDatahubTaskTaskListSourceResourceEsParam>
    ES configuration, required when Type is ES.
    EventBusParams List<GetCkafkaDatahubTaskTaskListSourceResourceEventBusParam>
    EB configuration, required when Type is EB.
    KafkaParams List<GetCkafkaDatahubTaskTaskListSourceResourceKafkaParam>
    CKafka configuration, required when Type is KAFKA.
    MariaDbParams List<GetCkafkaDatahubTaskTaskListSourceResourceMariaDbParam>
    MariaDB configuration, required when Type is MARIADB.
    MongoDbParams List<GetCkafkaDatahubTaskTaskListSourceResourceMongoDbParam>
    MongoDB configuration, required when Type is MONGODB.
    MySqlParams List<GetCkafkaDatahubTaskTaskListSourceResourceMySqlParam>
    MySQL configuration, required when Type is MYSQL.
    PostgreSqlParams List<GetCkafkaDatahubTaskTaskListSourceResourcePostgreSqlParam>
    PostgreSQL configuration, required when Type is POSTGRESQL or TDSQL_C_POSTGRESQL.
    ScfParams List<GetCkafkaDatahubTaskTaskListSourceResourceScfParam>
    SCF configuration, required when Type is SCF.
    SqlServerParams List<GetCkafkaDatahubTaskTaskListSourceResourceSqlServerParam>
    SQL Server configuration, required when Type is SQLSERVER.
    TdwParams List<GetCkafkaDatahubTaskTaskListSourceResourceTdwParam>
    TDW configuration, required when Type is TDW.
    TopicParams List<GetCkafkaDatahubTaskTaskListSourceResourceTopicParam>
    Topic configuration, required when Type is TOPIC.
    Type string
    Resource type.
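
    Only the params list matching the Type discriminator is expected to be populated on a given source resource. A hedged TypeScript sketch of reading the discriminator:

    import * as tencentcloud from "@pulumi/tencentcloud";
    
    const tasks = tencentcloud.getCkafkaDatahubTaskOutput({});
    
    // Collect the resource type of every source attached to every task.
    export const sourceTypes = tasks.taskLists.apply(lists =>
        lists.flatMap(t => t.sourceResources.map(r => r.type)),
    );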

    GetCkafkaDatahubTaskTaskListSourceResourceClickHouseParam

    Cluster string
    ClickHouse cluster.
    Database string
    ClickHouse database name.
    DropCls List<GetCkafkaDatahubTaskTaskListSourceResourceClickHouseParamDropCl>
    Settings for delivering dropped messages to CLS. When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
    DropInvalidMessage bool
    Whether to discard messages that fail to parse; the default is true.
    Ip string
    Instance connection IP.
    Password string
    Database password.
    Port double
    Connection port.
    Resource string
    Resource.
    Schemas List<GetCkafkaDatahubTaskTaskListSourceResourceClickHouseParamSchema>
    ClickHouse schema.
    SelfBuilt bool
    Whether it is a self-built cluster.
    ServiceVip string
    Instance VIP.
    Table string
    Table name. Multiple non-system tables in the monitored databases can be listed, separated by commas; each entry must use the format database name.table name.
    Type string
    Resource type.
    UniqVpcId string
    Instance VPC ID.
    UserName string
    Database user name.

    GetCkafkaDatahubTaskTaskListSourceResourceClickHouseParamDropCl

    DropClsLogSet string
    CLS logset ID.
    DropClsOwneruin string
    Account UIN.
    DropClsRegion string
    The region where CLS is delivered.
    DropClsTopicId string
    CLS topic ID.
    DropInvalidMessageToCls bool
    Whether to deliver to CLS.

    GetCkafkaDatahubTaskTaskListSourceResourceClickHouseParamSchema

    AllowNull bool
    Whether null values are allowed.
    ColumnName string
    Column name.
    JsonKey string
    The key name of the message.
    Type string
    Column data type.

    GetCkafkaDatahubTaskTaskListSourceResourceClsParam

    ContentKey string
    Key for data in non-JSON format.
    DecodeJson bool
    Whether the produced information is in JSON format.
    LogSet string
    Logset ID.
    Resource string
    Resource.
    TimeField string
    Specifies a field in the message whose content is used as the time of the CLS log; the field content must be a second-level timestamp.

    GetCkafkaDatahubTaskTaskListSourceResourceCosParam

    AggregateBatchSize double
    The size of aggregated messages, in MB.
    AggregateInterval double
    Time interval.
    BucketName string
    COS bucket name.
    DirectoryTimeFormat string
    Partition format, formatted according to strptime.
    FormatOutputType string
    The file format after message aggregation: csv|json.
    ObjectKey string
    Object key.
    ObjectKeyPrefix string
    Dumped object directory prefix.
    Region string
    Region code.

    GetCkafkaDatahubTaskTaskListSourceResourceCtsdbParam

    CtsdbMetric string
    CTSDB metric.
    Resource string
    Resource.

    GetCkafkaDatahubTaskTaskListSourceResourceDtsParam

    GroupId string
    DTS consumer group ID.
    GroupPassword string
    DTS consumer group password.
    GroupUser string
    DTS account.
    Ip string
    Connection IP.
    Port double
    Connection port.
    Resource string
    Resource.
    Topic string
    Topic name; separate names with commas when there is more than one topic.
    TranSql bool
    Whether to synchronize parsed data: false synchronizes the original data, true synchronizes the parsed JSON-format data; the default is true.

    GetCkafkaDatahubTaskTaskListSourceResourceEsParam

    ContentKey string
    Key for data in non-JSON format.
    DatabasePrimaryKey string
    When the message dumped to ES is a database binlog, fill in the primary key of the database table if insert, delete, and update operations need to be synchronized to ES.
    DateFormat string
    ES date suffix.
    DocumentIdField string
    The field name of the document ID value dumped into ES.
    DropCls List<GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropCl>
    Settings for delivering dropped messages to CLS. When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
    DropDlqs List<GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlq>
    Dead letter queue.
    DropInvalidJsonMessage bool
    Whether ES discards messages in non-JSON format.
    DropInvalidMessage bool
    Whether to discard messages that fail to parse; the default is true.
    Index string
    ES index name.
    IndexType string
    ES custom index name type: STRING or JSONPATH; the default is STRING.
    Password string
    ES password.
    Port double
    Connection port.
    Resource string
    Resource.
    SelfBuilt bool
    Whether it is a self-built cluster.
    ServiceVip string
    Instance VIP.
    UniqVpcId string
    Instance VPC ID.
    UserName string
    ES user name.

    GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropCl

    DropClsLogSet string
    CLS logset ID.
    DropClsOwneruin string
    Account UIN.
    DropClsRegion string
    The region where CLS is delivered.
    DropClsTopicId string
    CLS topic ID.
    DropInvalidMessageToCls bool
    Whether to deliver to CLS.

    GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlq

    DlqType string
    DLQ type, CKAFKA|TOPIC.
    KafkaParams List<GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlqKafkaParam>
    CKafka configuration, required when Type is KAFKA.
    MaxRetryAttempts double
    Retry times.
    RetryInterval double
    Retry interval.
    TopicParams List<GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlqTopicParam>
    Topic configuration, required when Type is TOPIC.
    Type string
    Resource type.

    GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlqKafkaParam

    CompressionType string
    Compression to apply when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
    ConnectorSyncType string
    Connector sync type.
    EnableToleration bool
    Enable dead letter queue.
    KeepPartition bool
    Keep partition.
    MsgMultiple double
    Each source topic message is amplified into MsgMultiple messages written to the target topic (currently only applicable to CKafka-to-CKafka flows).
    OffsetType string
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    PartitionNum double
    The number of partitions of the topic.
    QpsLimit double
    QPS (queries per second) limit.
    Resource string
    Resource.
    ResourceName string
    Instance name.
    SelfBuilt bool
    Whether it is a self-built cluster.
    StartTime double
    Required when OffsetType is timestamp; a timestamp accurate to the second.
    TableMappings List<GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlqKafkaParamTableMapping>
    Maps of table to topic; required when the multi-table option is selected.
    Topic string
    Topic name; separate names with commas when there is more than one topic.
    TopicId string
    Topic ID.
    UseAutoCreateTopic bool
    Whether the topic should be created automatically if it does not exist (currently only supported for SOURCE inflow tasks).
    UseTableMapping bool
    Whether to use multiple tables.
    ZoneId double
    Zone ID.
    CompressionType string
    Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
    ConnectorSyncType string
    ConnectorSyncType.
    EnableToleration bool
    enable dead letter queue.
    KeepPartition bool
    KeepPartition.
    MsgMultiple float64
    1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
    OffsetType string
    Offset type, initial position earliest, latest position latest, time point position timestamp.
    PartitionNum float64
    the partition num of the topic.
    QpsLimit float64
    Qps(query per seconds) limit.
    Resource string
    Resource.
    ResourceName string
    instance name.
    SelfBuilt bool
    Whether it is a self-built cluster.
    StartTime float64
    It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
    TableMappings []GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlqKafkaParamTableMapping
    maps of table to topic, required when multi topic is selected.
    Topic string
    Topic name, use , when more than 1 topic.
    TopicId string
    Topic TopicId.
    UseAutoCreateTopic bool
    whether the used topic need to be automatically created (currently only supports SOURCE inflow tasks).
    UseTableMapping bool
    whether to use multi table.
    ZoneId float64
    Zone ID.
    compressionType String
    Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
    connectorSyncType String
    ConnectorSyncType.
    enableToleration Boolean
    enable dead letter queue.
    keepPartition Boolean
    KeepPartition.
    msgMultiple Double
    1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
    offsetType String
    Offset type, initial position earliest, latest position latest, time point position timestamp.
    partitionNum Double
    the partition num of the topic.
    qpsLimit Double
    Qps(query per seconds) limit.
    resource String
    Resource.
    resourceName String
    instance name.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    startTime Double
    It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
    tableMappings List<GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlqKafkaParamTableMapping>
    maps of table to topic, required when multi topic is selected.
    topic String
    Topic name, use , when more than 1 topic.
    topicId String
    Topic TopicId.
    useAutoCreateTopic Boolean
    whether the used topic need to be automatically created (currently only supports SOURCE inflow tasks).
    useTableMapping Boolean
    whether to use multi table.
    zoneId Double
    Zone ID.
    compressionType string
    Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
    connectorSyncType string
    ConnectorSyncType.
    enableToleration boolean
    enable dead letter queue.
    keepPartition boolean
    KeepPartition.
    msgMultiple number
    1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
    offsetType string
    Offset type, initial position earliest, latest position latest, time point position timestamp.
    partitionNum number
    the partition num of the topic.
    qpsLimit number
    Qps(query per seconds) limit.
    resource string
    Resource.
    resourceName string
    instance name.
    selfBuilt boolean
    Whether it is a self-built cluster.
    startTime number
    It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
    tableMappings GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlqKafkaParamTableMapping[]
    maps of table to topic, required when multi topic is selected.
    topic string
    Topic name, use , when more than 1 topic.
    topicId string
    Topic TopicId.
    useAutoCreateTopic boolean
    whether the used topic need to be automatically created (currently only supports SOURCE inflow tasks).
    useTableMapping boolean
    whether to use multi table.
    zoneId number
    Zone ID.
    compression_type str
    Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
    connector_sync_type str
    ConnectorSyncType.
    enable_toleration bool
    enable dead letter queue.
    keep_partition bool
    KeepPartition.
    msg_multiple float
    1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
    offset_type str
    Offset type, initial position earliest, latest position latest, time point position timestamp.
    partition_num float
    the partition num of the topic.
    qps_limit float
    Qps(query per seconds) limit.
    resource str
    Resource.
    resource_name str
    instance name.
    self_built bool
    Whether it is a self-built cluster.
    start_time float
    It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
    table_mappings Sequence[GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlqKafkaParamTableMapping]
    maps of table to topic, required when multi topic is selected.
    topic str
    Topic name, use , when more than 1 topic.
    topic_id str
    Topic TopicId.
    use_auto_create_topic bool
    whether the used topic need to be automatically created (currently only supports SOURCE inflow tasks).
    use_table_mapping bool
    whether to use multi table.
    zone_id float
    Zone ID.
    compressionType String
    Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
    connectorSyncType String
    ConnectorSyncType.
    enableToleration Boolean
    enable dead letter queue.
    keepPartition Boolean
    KeepPartition.
    msgMultiple Number
    1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
    offsetType String
    Offset type, initial position earliest, latest position latest, time point position timestamp.
    partitionNum Number
    the partition num of the topic.
    qpsLimit Number
    Qps(query per seconds) limit.
    resource String
    Resource.
    resourceName String
    instance name.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    startTime Number
    It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
    tableMappings List<Property Map>
    maps of table to topic, required when multi topic is selected.
    topic String
    Topic name, use , when more than 1 topic.
    topicId String
    Topic TopicId.
    useAutoCreateTopic Boolean
    whether the used topic need to be automatically created (currently only supports SOURCE inflow tasks).
    useTableMapping Boolean
    whether to use multi table.
    zoneId Number
    Zone ID.

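    The OffsetType/StartTime pairing above is the one detail that is easy to get wrong, so here is a small illustrative TypeScript helper; the function and parameter names are mine, not part of the provider API:

    // Minimal sketch: startTime only applies when offsetType is "timestamp".
    function describeOffset(p: { offsetType?: string; startTime?: number }): string {
        return p.offsetType === "timestamp"
            ? `start at Unix second ${p.startTime}`
            : `start at ${p.offsetType ?? "default"}`;
    }

    describeOffset({ offsetType: "timestamp", startTime: 1700000000 }); // "start at Unix second 1700000000"
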
    GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlqKafkaParamTableMapping

    Database string
    Database name.
    Table string
    Table name: any non-system table in the monitored databases. Separate multiple tables with commas; write each entry as database name.table name.
    Topic string
    Topic name; separate multiple topics with commas.
    TopicId string
    Topic ID.
    Database string
    Database name.
    Table string
    Table name: any non-system table in the monitored databases. Separate multiple tables with commas; write each entry as database name.table name.
    Topic string
    Topic name; separate multiple topics with commas.
    TopicId string
    Topic ID.
    database String
    Database name.
    table String
    Table name: any non-system table in the monitored databases. Separate multiple tables with commas; write each entry as database name.table name.
    topic String
    Topic name; separate multiple topics with commas.
    topicId String
    Topic ID.
    database string
    Database name.
    table string
    Table name: any non-system table in the monitored databases. Separate multiple tables with commas; write each entry as database name.table name.
    topic string
    Topic name; separate multiple topics with commas.
    topicId string
    Topic ID.
    database str
    Database name.
    table str
    Table name: any non-system table in the monitored databases. Separate multiple tables with commas; write each entry as database name.table name.
    topic str
    Topic name; separate multiple topics with commas.
    topic_id str
    Topic ID.
    database String
    Database name.
    table String
    Table name: any non-system table in the monitored databases. Separate multiple tables with commas; write each entry as database name.table name.
    topic String
    Topic name; separate multiple topics with commas.
    topicId String
    Topic ID.

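    A hypothetical mapping value, just to illustrate the formats described above (all identifiers below are placeholders):

    const tableMapping = {
        database: "db1",
        table: "db1.orders,db1.customers", // comma-separated "database name.table name" entries
        topic: "orders-topic",             // target topic for these tables
        topicId: "topic-id-placeholder",
    };
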
    GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlqTopicParam

    CompressionType string
    Compression used when writing to the topic: none to disable, or one of gzip, snappy, lz4.
    MsgMultiple double
    Each source topic message is amplified MsgMultiple times when written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    OffsetType string
    Offset type: earliest (initial position), latest (latest position), or timestamp (a point in time).
    Resource string
    Resource.
    StartTime double
    Required when OffsetType is timestamp; the start timestamp, accurate to the second.
    TopicId string
    Topic ID.
    UseAutoCreateTopic bool
    Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
    CompressionType string
    Compression used when writing to the topic: none to disable, or one of gzip, snappy, lz4.
    MsgMultiple float64
    Each source topic message is amplified MsgMultiple times when written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    OffsetType string
    Offset type: earliest (initial position), latest (latest position), or timestamp (a point in time).
    Resource string
    Resource.
    StartTime float64
    Required when OffsetType is timestamp; the start timestamp, accurate to the second.
    TopicId string
    Topic ID.
    UseAutoCreateTopic bool
    Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
    compressionType String
    Compression used when writing to the topic: none to disable, or one of gzip, snappy, lz4.
    msgMultiple Double
    Each source topic message is amplified MsgMultiple times when written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType String
    Offset type: earliest (initial position), latest (latest position), or timestamp (a point in time).
    resource String
    Resource.
    startTime Double
    Required when OffsetType is timestamp; the start timestamp, accurate to the second.
    topicId String
    Topic ID.
    useAutoCreateTopic Boolean
    Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
    compressionType string
    Compression used when writing to the topic: none to disable, or one of gzip, snappy, lz4.
    msgMultiple number
    Each source topic message is amplified MsgMultiple times when written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType string
    Offset type: earliest (initial position), latest (latest position), or timestamp (a point in time).
    resource string
    Resource.
    startTime number
    Required when OffsetType is timestamp; the start timestamp, accurate to the second.
    topicId string
    Topic ID.
    useAutoCreateTopic boolean
    Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
    compression_type str
    Compression used when writing to the topic: none to disable, or one of gzip, snappy, lz4.
    msg_multiple float
    Each source topic message is amplified MsgMultiple times when written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offset_type str
    Offset type: earliest (initial position), latest (latest position), or timestamp (a point in time).
    resource str
    Resource.
    start_time float
    Required when OffsetType is timestamp; the start timestamp, accurate to the second.
    topic_id str
    Topic ID.
    use_auto_create_topic bool
    Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
    compressionType String
    Compression used when writing to the topic: none to disable, or one of gzip, snappy, lz4.
    msgMultiple Number
    Each source topic message is amplified MsgMultiple times when written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType String
    Offset type: earliest (initial position), latest (latest position), or timestamp (a point in time).
    resource String
    Resource.
    startTime Number
    Required when OffsetType is timestamp; the start timestamp, accurate to the second.
    topicId String
    Topic ID.
    useAutoCreateTopic Boolean
    Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).

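    One line of arithmetic makes the MsgMultiple semantics explicit; this is an illustration of the description above, not provider code:

    // With msgMultiple = 2, every source message is written twice to the target topic,
    // so 1,000 source messages become 2,000 target messages.
    const targetMessages = (sourceMessages: number, msgMultiple: number) => sourceMessages * msgMultiple;
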
    GetCkafkaDatahubTaskTaskListSourceResourceEventBusParam

    FunctionName string
    SCF function name.
    Namespace string
    SCF function namespace; defaults to default.
    Qualifier string
    SCF function version or alias; defaults to DEFAULT.
    Resource string
    Resource.
    SelfBuilt bool
    Whether it is a self-built cluster.
    Type string
    Resource Type.
    FunctionName string
    SCF function name.
    Namespace string
    SCF function namespace; defaults to default.
    Qualifier string
    SCF function version or alias; defaults to DEFAULT.
    Resource string
    Resource.
    SelfBuilt bool
    Whether it is a self-built cluster.
    Type string
    Resource Type.
    functionName String
    SCF function name.
    namespace String
    SCF function namespace; defaults to default.
    qualifier String
    SCF function version or alias; defaults to DEFAULT.
    resource String
    Resource.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    type String
    Resource Type.
    functionName string
    SCF function name.
    namespace string
    SCF function namespace; defaults to default.
    qualifier string
    SCF function version or alias; defaults to DEFAULT.
    resource string
    Resource.
    selfBuilt boolean
    Whether it is a self-built cluster.
    type string
    Resource Type.
    function_name str
    SCF function name.
    namespace str
    SCF function namespace; defaults to default.
    qualifier str
    SCF function version or alias; defaults to DEFAULT.
    resource str
    Resource.
    self_built bool
    Whether it is a self-built cluster.
    type str
    Resource Type.
    functionName String
    SCF function name.
    namespace String
    SCF function namespace; defaults to default.
    qualifier String
    SCF function version or alias; defaults to DEFAULT.
    resource String
    Resource.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    type String
    Resource Type.

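    For orientation, a hypothetical EventBus source pointing at an SCF function; every value below is a placeholder, and the defaults are the ones noted in the descriptions above:

    const eventBusParam = {
        functionName: "my-scf-function", // placeholder function name
        namespace: "default",            // default namespace
        qualifier: "DEFAULT",            // default version/alias
        selfBuilt: false,
        type: "SCF",                     // assumed resource type value
    };
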
    GetCkafkaDatahubTaskTaskListSourceResourceKafkaParam

    CompressionType string
    Compression used when writing to the topic: none to disable, or one of gzip, snappy, lz4.
    ConnectorSyncType string
    ConnectorSyncType.
    EnableToleration bool
    Enable the dead letter queue.
    KeepPartition bool
    KeepPartition.
    MsgMultiple double
    Each source topic message is amplified MsgMultiple times when written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    OffsetType string
    Offset type: earliest (initial position), latest (latest position), or timestamp (a point in time).
    PartitionNum double
    Number of partitions in the topic.
    QpsLimit double
    QPS (queries per second) limit.
    Resource string
    Resource.
    ResourceName string
    Instance name.
    SelfBuilt bool
    Whether it is a self-built cluster.
    StartTime double
    Required when OffsetType is timestamp; the start timestamp, accurate to the second.
    TableMappings List<GetCkafkaDatahubTaskTaskListSourceResourceKafkaParamTableMapping>
    Table-to-topic mappings, required when multiple topics are used.
    Topic string
    Topic name; separate multiple topics with commas.
    TopicId string
    Topic ID.
    UseAutoCreateTopic bool
    Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
    UseTableMapping bool
    Whether to use multiple tables.
    ZoneId double
    Zone ID.
    CompressionType string
    Compression used when writing to the topic: none to disable, or one of gzip, snappy, lz4.
    ConnectorSyncType string
    ConnectorSyncType.
    EnableToleration bool
    Enable the dead letter queue.
    KeepPartition bool
    KeepPartition.
    MsgMultiple float64
    Each source topic message is amplified MsgMultiple times when written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    OffsetType string
    Offset type: earliest (initial position), latest (latest position), or timestamp (a point in time).
    PartitionNum float64
    Number of partitions in the topic.
    QpsLimit float64
    QPS (queries per second) limit.
    Resource string
    Resource.
    ResourceName string
    Instance name.
    SelfBuilt bool
    Whether it is a self-built cluster.
    StartTime float64
    Required when OffsetType is timestamp; the start timestamp, accurate to the second.
    TableMappings []GetCkafkaDatahubTaskTaskListSourceResourceKafkaParamTableMapping
    Table-to-topic mappings, required when multiple topics are used.
    Topic string
    Topic name; separate multiple topics with commas.
    TopicId string
    Topic ID.
    UseAutoCreateTopic bool
    Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
    UseTableMapping bool
    Whether to use multiple tables.
    ZoneId float64
    Zone ID.
    compressionType String
    Compression used when writing to the topic: none to disable, or one of gzip, snappy, lz4.
    connectorSyncType String
    ConnectorSyncType.
    enableToleration Boolean
    Enable the dead letter queue.
    keepPartition Boolean
    KeepPartition.
    msgMultiple Double
    Each source topic message is amplified MsgMultiple times when written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType String
    Offset type: earliest (initial position), latest (latest position), or timestamp (a point in time).
    partitionNum Double
    Number of partitions in the topic.
    qpsLimit Double
    QPS (queries per second) limit.
    resource String
    Resource.
    resourceName String
    Instance name.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    startTime Double
    Required when OffsetType is timestamp; the start timestamp, accurate to the second.
    tableMappings List<GetCkafkaDatahubTaskTaskListSourceResourceKafkaParamTableMapping>
    Table-to-topic mappings, required when multiple topics are used.
    topic String
    Topic name; separate multiple topics with commas.
    topicId String
    Topic ID.
    useAutoCreateTopic Boolean
    Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
    useTableMapping Boolean
    Whether to use multiple tables.
    zoneId Double
    Zone ID.
    compressionType string
    Compression used when writing to the topic: none to disable, or one of gzip, snappy, lz4.
    connectorSyncType string
    ConnectorSyncType.
    enableToleration boolean
    Enable the dead letter queue.
    keepPartition boolean
    KeepPartition.
    msgMultiple number
    Each source topic message is amplified MsgMultiple times when written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType string
    Offset type: earliest (initial position), latest (latest position), or timestamp (a point in time).
    partitionNum number
    Number of partitions in the topic.
    qpsLimit number
    QPS (queries per second) limit.
    resource string
    Resource.
    resourceName string
    Instance name.
    selfBuilt boolean
    Whether it is a self-built cluster.
    startTime number
    Required when OffsetType is timestamp; the start timestamp, accurate to the second.
    tableMappings GetCkafkaDatahubTaskTaskListSourceResourceKafkaParamTableMapping[]
    Table-to-topic mappings, required when multiple topics are used.
    topic string
    Topic name; separate multiple topics with commas.
    topicId string
    Topic ID.
    useAutoCreateTopic boolean
    Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
    useTableMapping boolean
    Whether to use multiple tables.
    zoneId number
    Zone ID.
    compression_type str
    Compression used when writing to the topic: none to disable, or one of gzip, snappy, lz4.
    connector_sync_type str
    ConnectorSyncType.
    enable_toleration bool
    Enable the dead letter queue.
    keep_partition bool
    KeepPartition.
    msg_multiple float
    Each source topic message is amplified MsgMultiple times when written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offset_type str
    Offset type: earliest (initial position), latest (latest position), or timestamp (a point in time).
    partition_num float
    Number of partitions in the topic.
    qps_limit float
    QPS (queries per second) limit.
    resource str
    Resource.
    resource_name str
    Instance name.
    self_built bool
    Whether it is a self-built cluster.
    start_time float
    Required when OffsetType is timestamp; the start timestamp, accurate to the second.
    table_mappings Sequence[GetCkafkaDatahubTaskTaskListSourceResourceKafkaParamTableMapping]
    Table-to-topic mappings, required when multiple topics are used.
    topic str
    Topic name; separate multiple topics with commas.
    topic_id str
    Topic ID.
    use_auto_create_topic bool
    Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
    use_table_mapping bool
    Whether to use multiple tables.
    zone_id float
    Zone ID.
    compressionType String
    Compression used when writing to the topic: none to disable, or one of gzip, snappy, lz4.
    connectorSyncType String
    ConnectorSyncType.
    enableToleration Boolean
    Enable the dead letter queue.
    keepPartition Boolean
    KeepPartition.
    msgMultiple Number
    Each source topic message is amplified MsgMultiple times when written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType String
    Offset type: earliest (initial position), latest (latest position), or timestamp (a point in time).
    partitionNum Number
    Number of partitions in the topic.
    qpsLimit Number
    QPS (queries per second) limit.
    resource String
    Resource.
    resourceName String
    Instance name.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    startTime Number
    Required when OffsetType is timestamp; the start timestamp, accurate to the second.
    tableMappings List<Property Map>
    Table-to-topic mappings, required when multiple topics are used.
    topic String
    Topic name; separate multiple topics with commas.
    topicId String
    Topic ID.
    useAutoCreateTopic Boolean
    Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
    useTableMapping Boolean
    Whether to use multiple tables.
    zoneId Number
    Zone ID.

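    As the Topic description notes, several topics travel in one comma-separated string; a one-liner shows the convention (illustrative only):

    const topics = "topic-a,topic-b,topic-c".split(","); // ["topic-a", "topic-b", "topic-c"]
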
    GetCkafkaDatahubTaskTaskListSourceResourceKafkaParamTableMapping

    Database string
    Database name.
    Table string
    Table name: any non-system table in the monitored databases. Separate multiple tables with commas; write each entry as database name.table name.
    Topic string
    Topic name; separate multiple topics with commas.
    TopicId string
    Topic ID.
    Database string
    Database name.
    Table string
    Table name: any non-system table in the monitored databases. Separate multiple tables with commas; write each entry as database name.table name.
    Topic string
    Topic name; separate multiple topics with commas.
    TopicId string
    Topic ID.
    database String
    Database name.
    table String
    Table name: any non-system table in the monitored databases. Separate multiple tables with commas; write each entry as database name.table name.
    topic String
    Topic name; separate multiple topics with commas.
    topicId String
    Topic ID.
    database string
    Database name.
    table string
    Table name: any non-system table in the monitored databases. Separate multiple tables with commas; write each entry as database name.table name.
    topic string
    Topic name; separate multiple topics with commas.
    topicId string
    Topic ID.
    database str
    Database name.
    table str
    Table name: any non-system table in the monitored databases. Separate multiple tables with commas; write each entry as database name.table name.
    topic str
    Topic name; separate multiple topics with commas.
    topic_id str
    Topic ID.
    database String
    Database name.
    table String
    Table name: any non-system table in the monitored databases. Separate multiple tables with commas; write each entry as database name.table name.
    topic String
    Topic name; separate multiple topics with commas.
    topicId String
    Topic ID.

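    Building on the mapping format above, a small illustrative lookup resolves the target topic for a captured table; the interface and function names are mine, not provider API:

    interface TableMappingLike { database: string; table: string; topic: string; }

    // Find the mapping whose comma-separated table list contains "database.table".
    function topicFor(mappings: TableMappingLike[], db: string, table: string): string | undefined {
        return mappings.find(m => m.table.split(",").includes(`${db}.${table}`))?.topic;
    }
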
    GetCkafkaDatahubTaskTaskListSourceResourceMariaDbParam

    Database string
    MariaDB database name.
    IncludeContentChanges string
    If all, both DDL and DML data are written to the selected topic; if dml, only DML data is written.
    IncludeQuery bool
    If true, and the binlog_rows_query_log_events configuration option is ON, data flowing into the topic contains the original SQL statement; if false, it does not.
    IsTablePrefix bool
    True when the Table input is a prefix; otherwise false.
    KeyColumns string
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by semicolons and fields by commas; tables not listed default to the table's primary key.
    OutputFormat string
    Output format: DEFAULT, CANAL_1, or CANAL_2.
    RecordWithSchema bool
    If true, the message carries the schema of the message structure; if false, it does not.
    Resource string
    Resource.
    SnapshotMode string
    Snapshot mode: schema_only or initial; defaults to initial.
    Table string
    MariaDB table: any non-system table in the monitored databases. Separate multiple tables with commas; write each entry as database name.table name.
    Database string
    MariaDB database name.
    IncludeContentChanges string
    If all, both DDL and DML data are written to the selected topic; if dml, only DML data is written.
    IncludeQuery bool
    If true, and the binlog_rows_query_log_events configuration option is ON, data flowing into the topic contains the original SQL statement; if false, it does not.
    IsTablePrefix bool
    True when the Table input is a prefix; otherwise false.
    KeyColumns string
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by semicolons and fields by commas; tables not listed default to the table's primary key.
    OutputFormat string
    Output format: DEFAULT, CANAL_1, or CANAL_2.
    RecordWithSchema bool
    If true, the message carries the schema of the message structure; if false, it does not.
    Resource string
    Resource.
    SnapshotMode string
    Snapshot mode: schema_only or initial; defaults to initial.
    Table string
    MariaDB table: any non-system table in the monitored databases. Separate multiple tables with commas; write each entry as database name.table name.
    database String
    MariaDB database name.
    includeContentChanges String
    If all, both DDL and DML data are written to the selected topic; if dml, only DML data is written.
    includeQuery Boolean
    If true, and the binlog_rows_query_log_events configuration option is ON, data flowing into the topic contains the original SQL statement; if false, it does not.
    isTablePrefix Boolean
    True when the Table input is a prefix; otherwise false.
    keyColumns String
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by semicolons and fields by commas; tables not listed default to the table's primary key.
    outputFormat String
    Output format: DEFAULT, CANAL_1, or CANAL_2.
    recordWithSchema Boolean
    If true, the message carries the schema of the message structure; if false, it does not.
    resource String
    Resource.
    snapshotMode String
    Snapshot mode: schema_only or initial; defaults to initial.
    table String
    MariaDB table: any non-system table in the monitored databases. Separate multiple tables with commas; write each entry as database name.table name.
    database string
    MariaDB database name.
    includeContentChanges string
    If all, both DDL and DML data are written to the selected topic; if dml, only DML data is written.
    includeQuery boolean
    If true, and the binlog_rows_query_log_events configuration option is ON, data flowing into the topic contains the original SQL statement; if false, it does not.
    isTablePrefix boolean
    True when the Table input is a prefix; otherwise false.
    keyColumns string
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by semicolons and fields by commas; tables not listed default to the table's primary key.
    outputFormat string
    Output format: DEFAULT, CANAL_1, or CANAL_2.
    recordWithSchema boolean
    If true, the message carries the schema of the message structure; if false, it does not.
    resource string
    Resource.
    snapshotMode string
    Snapshot mode: schema_only or initial; defaults to initial.
    table string
    MariaDB table: any non-system table in the monitored databases. Separate multiple tables with commas; write each entry as database name.table name.
    database str
    MariaDB database name.
    include_content_changes str
    If all, both DDL and DML data are written to the selected topic; if dml, only DML data is written.
    include_query bool
    If true, and the binlog_rows_query_log_events configuration option is ON, data flowing into the topic contains the original SQL statement; if false, it does not.
    is_table_prefix bool
    True when the Table input is a prefix; otherwise false.
    key_columns str
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by semicolons and fields by commas; tables not listed default to the table's primary key.
    output_format str
    Output format: DEFAULT, CANAL_1, or CANAL_2.
    record_with_schema bool
    If true, the message carries the schema of the message structure; if false, it does not.
    resource str
    Resource.
    snapshot_mode str
    Snapshot mode: schema_only or initial; defaults to initial.
    table str
    MariaDB table: any non-system table in the monitored databases. Separate multiple tables with commas; write each entry as database name.table name.
    database String
    MariaDB database name.
    includeContentChanges String
    If all, both DDL and DML data are written to the selected topic; if dml, only DML data is written.
    includeQuery Boolean
    If true, and the binlog_rows_query_log_events configuration option is ON, data flowing into the topic contains the original SQL statement; if false, it does not.
    isTablePrefix Boolean
    True when the Table input is a prefix; otherwise false.
    keyColumns String
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by semicolons and fields by commas; tables not listed default to the table's primary key.
    outputFormat String
    Output format: DEFAULT, CANAL_1, or CANAL_2.
    recordWithSchema Boolean
    If true, the message carries the schema of the message structure; if false, it does not.
    resource String
    Resource.
    snapshotMode String
    Snapshot mode: schema_only or initial; defaults to initial.
    table String
    MariaDB table: any non-system table in the monitored databases. Separate multiple tables with commas; write each entry as database name.table name.

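    The KeyColumns format above is compact enough that a short parser makes it unambiguous; this is an illustrative sketch of the stated format, not provider code:

    // Parse "db1.t1:f1,f2;db2.t2:f2" into a table -> fields map.
    // Tables are separated by ";", a table is separated from its fields by ":",
    // and fields are separated by ",".
    function parseKeyColumns(spec: string): Map<string, string[]> {
        const result = new Map<string, string[]>();
        for (const entry of spec.split(";").filter(Boolean)) {
            const [table, fields] = entry.split(":");
            result.set(table, fields ? fields.split(",") : []);
        }
        return result;
    }

    parseKeyColumns("db1.t1:f1,f2;db2.t2:f2");
    // Map { "db1.t1" => ["f1", "f2"], "db2.t2" => ["f2"] }
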
    GetCkafkaDatahubTaskTaskListSourceResourceMongoDbParam

    Collection string
    MongoDB collection.
    CopyExisting bool
    Whether to copy existing (stock) data; defaults to true.
    Database string
    MongoDB database name.
    Ip string
    MongoDB connection IP.
    ListeningEvent string
    Listening event types; empty selects all. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename; separate multiple types with commas.
    Password string
    MongoDB database password.
    Pipeline string
    Aggregation pipeline.
    Port double
    MongoDB connection port.
    ReadPreference string
    Primary/secondary read preference; defaults to the primary node.
    Resource string
    Resource.
    SelfBuilt bool
    Whether it is a self-built cluster.
    UserName string
    MongoDB database username.
    Collection string
    MongoDB collection.
    CopyExisting bool
    Whether to copy existing (stock) data; defaults to true.
    Database string
    MongoDB database name.
    Ip string
    MongoDB connection IP.
    ListeningEvent string
    Listening event types; empty selects all. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename; separate multiple types with commas.
    Password string
    MongoDB database password.
    Pipeline string
    Aggregation pipeline.
    Port float64
    MongoDB connection port.
    ReadPreference string
    Primary/secondary read preference; defaults to the primary node.
    Resource string
    Resource.
    SelfBuilt bool
    Whether it is a self-built cluster.
    UserName string
    MongoDB database username.
    collection String
    MongoDB collection.
    copyExisting Boolean
    Whether to copy existing (stock) data; defaults to true.
    database String
    MongoDB database name.
    ip String
    MongoDB connection IP.
    listeningEvent String
    Listening event types; empty selects all. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename; separate multiple types with commas.
    password String
    MongoDB database password.
    pipeline String
    Aggregation pipeline.
    port Double
    MongoDB connection port.
    readPreference String
    Primary/secondary read preference; defaults to the primary node.
    resource String
    Resource.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    userName String
    MongoDB database username.
    collection string
    MongoDB collection.
    copyExisting boolean
    Whether to copy existing (stock) data; defaults to true.
    database string
    MongoDB database name.
    ip string
    MongoDB connection IP.
    listeningEvent string
    Listening event types; empty selects all. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename; separate multiple types with commas.
    password string
    MongoDB database password.
    pipeline string
    Aggregation pipeline.
    port number
    MongoDB connection port.
    readPreference string
    Primary/secondary read preference; defaults to the primary node.
    resource string
    Resource.
    selfBuilt boolean
    Whether it is a self-built cluster.
    userName string
    MongoDB database username.
    collection str
    MongoDB collection.
    copy_existing bool
    Whether to copy existing (stock) data; defaults to true.
    database str
    MongoDB database name.
    ip str
    MongoDB connection IP.
    listening_event str
    Listening event types; empty selects all. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename; separate multiple types with commas.
    password str
    MongoDB database password.
    pipeline str
    Aggregation pipeline.
    port float
    MongoDB connection port.
    read_preference str
    Primary/secondary read preference; defaults to the primary node.
    resource str
    Resource.
    self_built bool
    Whether it is a self-built cluster.
    user_name str
    MongoDB database username.
    collection String
    MongoDB collection.
    copyExisting Boolean
    Whether to copy existing (stock) data; defaults to true.
    database String
    MongoDB database name.
    ip String
    MongoDB connection IP.
    listeningEvent String
    Listening event types; empty selects all. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename; separate multiple types with commas.
    password String
    MongoDB database password.
    pipeline String
    Aggregation pipeline.
    port Number
    MongoDB connection port.
    readPreference String
    Primary/secondary read preference; defaults to the primary node.
    resource String
    Resource.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    userName String
    MongoDB database username.

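    A two-line sketch of the ListeningEvent convention above (an illustration of the description, not provider code):

    // An empty string selects all event types; otherwise events are comma-separated.
    const listeningEvent = "insert,update,delete"; // placeholder selection
    const selectedEvents = listeningEvent ? listeningEvent.split(",") : "all";
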
    GetCkafkaDatahubTaskTaskListSourceResourceMySqlParam

    DataSourceIncrementColumn string
    the name of the column to be monitored.
    DataSourceIncrementMode string
    TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
    DataSourceMonitorMode string
    TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
    DataSourceMonitorResource string
    When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
    DataSourceStartFrom string
    HEAD means copy stock + incremental data, TAIL means copy only incremental data.
    DataTargetInsertMode string
    INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
    DataTargetPrimaryKeyField string
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    DataTargetRecordMappings List<GetCkafkaDatahubTaskTaskListSourceResourceMySqlParamDataTargetRecordMapping>
    Mapping relationship between tables and messages.
    Database string
    SQLServer database name.
    DdlTopic string
    The Topic that stores the Ddl information of My SQL, if it is empty, it will not be stored by default.
    DropCls List<GetCkafkaDatahubTaskTaskListSourceResourceMySqlParamDropCl>
    When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
    DropInvalidMessage bool
    Whether to discard messages that fail to parse, the default is true.
    IncludeContentChanges string
    If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
    IncludeQuery bool
    If the value is true, and the value of the binlog rows query log events configuration item in My SQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain Original SQL statement.
    IsTablePrefix bool
    When the Table input is a prefix, the value of this item is true, otherwise it is false.
    IsTableRegular bool
    Whether the input table is a regular expression.
    KeyColumns string
    Format library1.table1:field 1,field2;library2.table2:field2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
    OutputFormat string
    output format, DEFAULT, CANAL_1, CANAL_2.
    RecordWithSchema bool
    If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
    Resource string
    Resource.
    SignalDatabase string
    database name of signal table.
    SnapshotMode string
    schema_only|initial default initial.
    Table string
    SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
    TopicRegex string
    Regular expression for routing events to specific topics, defaults to (.*).
    TopicReplacement string
    TopicRegex, $1, $2.
    DataSourceIncrementColumn string
    the name of the column to be monitored.
    DataSourceIncrementMode string
    TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
    DataSourceMonitorMode string
    TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
    DataSourceMonitorResource string
    When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
    DataSourceStartFrom string
    HEAD means copy stock + incremental data, TAIL means copy only incremental data.
    DataTargetInsertMode string
    INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
    DataTargetPrimaryKeyField string
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    DataTargetRecordMappings []GetCkafkaDatahubTaskTaskListSourceResourceMySqlParamDataTargetRecordMapping
    Mapping relationship between tables and messages.
    Database string
    SQLServer database name.
    DdlTopic string
    The Topic that stores the Ddl information of My SQL, if it is empty, it will not be stored by default.
    DropCls []GetCkafkaDatahubTaskTaskListSourceResourceMySqlParamDropCl
    When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
    DropInvalidMessage bool
    Whether to discard messages that fail to parse, the default is true.
    IncludeContentChanges string
    If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
    IncludeQuery bool
    If the value is true, and the value of the binlog rows query log events configuration item in My SQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain Original SQL statement.
    IsTablePrefix bool
    When the Table input is a prefix, the value of this item is true, otherwise it is false.
    IsTableRegular bool
    Whether the input table is a regular expression.
    KeyColumns string
    Format library1.table1:field 1,field2;library2.table2:field2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
    OutputFormat string
    output format, DEFAULT, CANAL_1, CANAL_2.
    RecordWithSchema bool
    If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
    Resource string
    Resource.
    SignalDatabase string
    database name of signal table.
    SnapshotMode string
    schema_only|initial default initial.
    Table string
    SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
    TopicRegex string
    Regular expression for routing events to specific topics, defaults to (.*).
    TopicReplacement string
    TopicRegex, $1, $2.
    dataSourceIncrementColumn String
    the name of the column to be monitored.
    dataSourceIncrementMode String
    TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
    dataSourceMonitorMode String
    TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
    dataSourceMonitorResource String
    When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
    dataSourceStartFrom String
    HEAD means copy stock + incremental data, TAIL means copy only incremental data.
    dataTargetInsertMode String
    INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
    dataTargetPrimaryKeyField String
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    dataTargetRecordMappings List<GetCkafkaDatahubTaskTaskListSourceResourceMySqlParamDataTargetRecordMapping>
    Mapping relationship between tables and messages.
    database String
    SQLServer database name.
    ddlTopic String
    The Topic that stores the Ddl information of My SQL, if it is empty, it will not be stored by default.
    dropCls List<GetCkafkaDatahubTaskTaskListSourceResourceMySqlParamDropCl>
    When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
    dropInvalidMessage Boolean
    Whether to discard messages that fail to parse, the default is true.
    includeContentChanges String
    If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
    includeQuery Boolean
    If the value is true, and the value of the binlog rows query log events configuration item in My SQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain Original SQL statement.
    isTablePrefix Boolean
    When the Table input is a prefix, the value of this item is true, otherwise it is false.
    isTableRegular Boolean
    Whether the input table is a regular expression.
    keyColumns String
    Format library1.table1:field 1,field2;library2.table2:field2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
    outputFormat String
    output format, DEFAULT, CANAL_1, CANAL_2.
    recordWithSchema Boolean
    If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
    resource String
    Resource.
    signalDatabase String
    database name of signal table.
    snapshotMode String
    schema_only|initial default initial.
    table String
    SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
    topicRegex String
    Regular expression for routing events to specific topics, defaults to (.*).
    topicReplacement String
    TopicRegex, $1, $2.
    dataSourceIncrementColumn string
    the name of the column to be monitored.
    dataSourceIncrementMode string
    TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
    dataSourceMonitorMode string
    TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
    dataSourceMonitorResource string
    When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
    dataSourceStartFrom string
    HEAD means copy stock + incremental data, TAIL means copy only incremental data.
    dataTargetInsertMode string
    INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
    dataTargetPrimaryKeyField string
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    dataTargetRecordMappings GetCkafkaDatahubTaskTaskListSourceResourceMySqlParamDataTargetRecordMapping[]
    Mapping relationship between tables and messages.
    database string
    SQLServer database name.
    ddlTopic string
    The Topic that stores the Ddl information of My SQL, if it is empty, it will not be stored by default.
    dropCls GetCkafkaDatahubTaskTaskListSourceResourceMySqlParamDropCl[]
    When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
    dropInvalidMessage boolean
    Whether to discard messages that fail to parse, the default is true.
    includeContentChanges string
    If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
    includeQuery boolean
    If the value is true, and the value of the binlog rows query log events configuration item in My SQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain Original SQL statement.
    isTablePrefix boolean
    When the Table input is a prefix, the value of this item is true, otherwise it is false.
    isTableRegular boolean
    Whether the input table is a regular expression.
    keyColumns string
    Format library1.table1:field 1,field2;library2.table2:field2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
    outputFormat string
    output format, DEFAULT, CANAL_1, CANAL_2.
    recordWithSchema boolean
    If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
    resource string
    Resource.
    signalDatabase string
    database name of signal table.
    snapshotMode string
    schema_only|initial default initial.
    table string
    SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
    topicRegex string
    Regular expression for routing events to specific topics, defaults to (.*).
    topicReplacement string
    TopicRegex, $1, $2.
    data_source_increment_column str
    The name of the column to be monitored.
    data_source_increment_mode str
    TIMESTAMP indicates that the incremental column is of timestamp type; INCREMENT indicates that the incremental column is of self-incrementing id type.
    data_source_monitor_mode str
    TABLE indicates that the read item is a table; QUERY indicates that the read item is a query.
    data_source_monitor_resource str
    When DataMonitorMode=TABLE, pass in the table to be read; when DataMonitorMode=QUERY, pass in the SQL query statement to be executed.
    data_source_start_from str
    HEAD means copy both stock and incremental data; TAIL means copy only incremental data.
    data_target_insert_mode str
    INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
    data_target_primary_key_field str
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    data_target_record_mappings Sequence[GetCkafkaDatahubTaskTaskListSourceResourceMySqlParamDataTargetRecordMapping]
    Mapping relationship between tables and messages.
    database str
    Database name.
    ddl_topic str
    The topic that stores the DDL information of MySQL; if empty, it is not stored by default.
    drop_cls Sequence[GetCkafkaDatahubTaskTaskListSourceResourceMySqlParamDropCl]
    When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
    drop_invalid_message bool
    Whether to discard messages that fail to parse; the default is true.
    include_content_changes str
    If the value is all, both DDL data and DML data are written to the selected topic; if the value is dml, only DML data is written to the selected topic.
    include_query bool
    If the value is true and the binlog_rows_query_log_events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
    is_table_prefix bool
    When the Table input is a prefix, the value of this item is true; otherwise it is false.
    is_table_regular bool
    Whether the input table is a regular expression.
    key_columns str
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). A table that is not specified defaults to the table's primary key.
    output_format str
    Output format: DEFAULT, CANAL_1, or CANAL_2.
    record_with_schema bool
    If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
    resource str
    Resource.
    signal_database str
    Database name of the signal table.
    snapshot_mode str
    Snapshot mode: schema_only or initial; the default is initial.
    table str
    The table to monitor: a non-system table in any of the monitored databases. You can use , (comma) to monitor multiple data tables, but each data table needs to be filled in the format database name.table name.
    topic_regex str
    Regular expression for routing events to specific topics, defaults to (.*).
    topic_replacement str
    Topic replacement expression used with TopicRegex; supports capture groups such as $1 and $2.
    dataSourceIncrementColumn String
    The name of the column to be monitored.
    dataSourceIncrementMode String
    TIMESTAMP indicates that the incremental column is of timestamp type; INCREMENT indicates that the incremental column is of self-incrementing id type.
    dataSourceMonitorMode String
    TABLE indicates that the read item is a table; QUERY indicates that the read item is a query.
    dataSourceMonitorResource String
    When DataMonitorMode=TABLE, pass in the table to be read; when DataMonitorMode=QUERY, pass in the SQL query statement to be executed.
    dataSourceStartFrom String
    HEAD means copy both stock and incremental data; TAIL means copy only incremental data.
    dataTargetInsertMode String
    INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
    dataTargetPrimaryKeyField String
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    dataTargetRecordMappings List<Property Map>
    Mapping relationship between tables and messages.
    database String
    Database name.
    ddlTopic String
    The topic that stores the DDL information of MySQL; if empty, it is not stored by default.
    dropCls List<Property Map>
    When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
    dropInvalidMessage Boolean
    Whether to discard messages that fail to parse; the default is true.
    includeContentChanges String
    If the value is all, both DDL data and DML data are written to the selected topic; if the value is dml, only DML data is written to the selected topic.
    includeQuery Boolean
    If the value is true and the binlog_rows_query_log_events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
    isTablePrefix Boolean
    When the Table input is a prefix, the value of this item is true; otherwise it is false.
    isTableRegular Boolean
    Whether the input table is a regular expression.
    keyColumns String
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). A table that is not specified defaults to the table's primary key.
    outputFormat String
    Output format: DEFAULT, CANAL_1, or CANAL_2.
    recordWithSchema Boolean
    If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
    resource String
    Resource.
    signalDatabase String
    Database name of the signal table.
    snapshotMode String
    Snapshot mode: schema_only or initial; the default is initial.
    table String
    The table to monitor: a non-system table in any of the monitored databases. You can use , (comma) to monitor multiple data tables, but each data table needs to be filled in the format database name.table name.
    topicRegex String
    Regular expression for routing events to specific topics, defaults to (.*).
    topicReplacement String
    Topic replacement expression used with TopicRegex; supports capture groups such as $1 and $2.

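    As a point of reference, the following TypeScript sketch shows how these MySQL source settings might be read back once the data source has resolved. The nested property names (taskLists, sourceResources, mySqlParams) are assumed from the schema names above rather than confirmed against the SDK, so treat it as illustrative only.

    import * as tencentcloud from "@pulumi/tencentcloud";

    // Illustrative walk of the result shape implied by the schema above;
    // the exact nested property names are an assumption.
    const tasks = tencentcloud.getCkafkaDatahubTask({ taskType: "SOURCE" });

    export const mysqlSources = tasks.then((res: any) =>
        (res.taskLists ?? []).flatMap((task: any) =>
            (task.sourceResources ?? []).flatMap((src: any) =>
                (src.mySqlParams ?? []).map((p: any) => ({
                    database: p.database,         // source database name
                    table: p.table,               // monitored table(s), comma-separated
                    snapshotMode: p.snapshotMode, // schema_only | initial
                })))));
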
    GetCkafkaDatahubTaskTaskListSourceResourceMySqlParamDataTargetRecordMapping

    AllowNull bool
    Whether the message is allowed to be empty.
    AutoIncrement bool
    Whether it is an auto-increment column.
    ColumnName string
    Column Name.
    ColumnSize string
    Current column size.
    DecimalDigits string
    Current column decimal digits.
    DefaultValue string
    Default value of the database table column.
    ExtraInfo string
    Extra information of the database table column.
    JsonKey string
    The key name of the message.
    Type string
    Column type.
    AllowNull bool
    Whether the message is allowed to be empty.
    AutoIncrement bool
    Whether it is an auto-increment column.
    ColumnName string
    Column Name.
    ColumnSize string
    Current column size.
    DecimalDigits string
    Current column decimal digits.
    DefaultValue string
    Default value of the database table column.
    ExtraInfo string
    Extra information of the database table column.
    JsonKey string
    The key name of the message.
    Type string
    Column type.
    allowNull Boolean
    Whether the message is allowed to be empty.
    autoIncrement Boolean
    Whether it is an auto-increment column.
    columnName String
    Column Name.
    columnSize String
    Current column size.
    decimalDigits String
    Current column decimal digits.
    defaultValue String
    Default value of the database table column.
    extraInfo String
    Extra information of the database table column.
    jsonKey String
    The key name of the message.
    type String
    Column type.
    allowNull boolean
    Whether the message is allowed to be empty.
    autoIncrement boolean
    Whether it is an auto-increment column.
    columnName string
    Column Name.
    columnSize string
    Current column size.
    decimalDigits string
    Current column decimal digits.
    defaultValue string
    Default value of the database table column.
    extraInfo string
    Extra information of the database table column.
    jsonKey string
    The key name of the message.
    type string
    Column type.
    allow_null bool
    Whether the message is allowed to be empty.
    auto_increment bool
    Whether it is an auto-increment column.
    column_name str
    Column Name.
    column_size str
    Current column size.
    decimal_digits str
    Current column decimal digits.
    default_value str
    Default value of the database table column.
    extra_info str
    Extra information of the database table column.
    json_key str
    The key name of the message.
    type str
    Column type.
    allowNull Boolean
    Whether the message is allowed to be empty.
    autoIncrement Boolean
    Whether it is an auto-increment column.
    columnName String
    Column Name.
    columnSize String
    Current column size.
    decimalDigits String
    Current column decimal digits.
    defaultValue String
    Default value of the database table column.
    extraInfo String
    Extra information of the database table column.
    jsonKey String
    The key name of the message.
    type String
    Column type.

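    To make the mapping fields above concrete, here is a small self-contained TypeScript sketch of one column-to-message mapping. The interface simply mirrors the documented fields; it is not imported from the SDK.

    // Hypothetical local type mirroring the record-mapping fields above.
    interface RecordMapping {
        columnName: string;    // database column
        jsonKey: string;       // key name carried in the message
        type: string;          // column type
        allowNull: boolean;    // whether the message value may be empty
        autoIncrement: boolean;
        columnSize?: string;   // current column size
        decimalDigits?: string;
        defaultValue?: string;
        extraInfo?: string;
    }

    // e.g. an auto-increment primary key column mapped onto the "id" key:
    const idMapping: RecordMapping = {
        columnName: "id",
        jsonKey: "id",
        type: "int",
        allowNull: false,
        autoIncrement: true,
    };
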
    GetCkafkaDatahubTaskTaskListSourceResourceMySqlParamDropCl

    DropClsLogSet string
    CLS logset ID.
    DropClsOwneruin string
    Account owner UIN.
    DropClsRegion string
    The region where the CLS is delivered.
    DropClsTopicId string
    CLS topic ID.
    DropInvalidMessageToCls bool
    Whether to deliver to CLS.
    DropClsLogSet string
    CLS logset ID.
    DropClsOwneruin string
    Account owner UIN.
    DropClsRegion string
    The region where the CLS is delivered.
    DropClsTopicId string
    CLS topic ID.
    DropInvalidMessageToCls bool
    Whether to deliver to CLS.
    dropClsLogSet String
    CLS logset ID.
    dropClsOwneruin String
    Account owner UIN.
    dropClsRegion String
    The region where the CLS is delivered.
    dropClsTopicId String
    CLS topic ID.
    dropInvalidMessageToCls Boolean
    Whether to deliver to CLS.
    dropClsLogSet string
    CLS logset ID.
    dropClsOwneruin string
    Account owner UIN.
    dropClsRegion string
    The region where the CLS is delivered.
    dropClsTopicId string
    CLS topic ID.
    dropInvalidMessageToCls boolean
    Whether to deliver to CLS.
    drop_cls_log_set str
    CLS logset ID.
    drop_cls_owneruin str
    Account owner UIN.
    drop_cls_region str
    The region where the CLS is delivered.
    drop_cls_topic_id str
    CLS topic ID.
    drop_invalid_message_to_cls bool
    Whether to deliver to CLS.
    dropClsLogSet String
    CLS logset ID.
    dropClsOwneruin String
    Account owner UIN.
    dropClsRegion String
    The region where the CLS is delivered.
    dropClsTopicId String
    CLS topic ID.
    dropInvalidMessageToCls Boolean
    Whether to deliver to CLS.

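    The interplay between the two drop settings above can be summarized as follows; this is a hedged restatement of the documented precedence, not SDK code.

    // When dropInvalidMessageToCls is true, the dropInvalidMessage flag is
    // ignored and unparseable messages go to the configured CLS topic instead.
    function invalidMessagePolicy(cfg: {
        dropInvalidMessage: boolean;
        dropInvalidMessageToCls: boolean;
    }): "deliver-to-cls" | "discard" | "keep" {
        if (cfg.dropInvalidMessageToCls) {
            return "deliver-to-cls";
        }
        return cfg.dropInvalidMessage ? "discard" : "keep";
    }
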
    GetCkafkaDatahubTaskTaskListSourceResourcePostgreSqlParam

    DataFormat string
    Upstream data format (JSON|Debezium); required when the database synchronization mode matches the default field.
    DataTargetInsertMode string
    INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
    DataTargetPrimaryKeyField string
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    DataTargetRecordMappings List<GetCkafkaDatahubTaskTaskListSourceResourcePostgreSqlParamDataTargetRecordMapping>
    Mapping relationship between tables and messages.
    Database string
    Database name.
    DropInvalidMessage bool
    Whether to discard messages that fail to parse; the default is true.
    IsTableRegular bool
    Whether the input table is a regular expression.
    KeyColumns string
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). A table that is not specified defaults to the table's primary key.
    PluginName string
    Plugin name (decoderbufs or pgoutput); the default is decoderbufs.
    RecordWithSchema bool
    If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
    Resource string
    Resource.
    SnapshotMode string
    Snapshot mode: schema_only or initial; the default is initial.
    Table string
    The table to monitor: a non-system table in any of the monitored databases. You can use , (comma) to monitor multiple data tables, but each data table needs to be filled in the format database name.table name.
    DataFormat string
    Upstream data format (JSON|Debezium); required when the database synchronization mode matches the default field.
    DataTargetInsertMode string
    INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
    DataTargetPrimaryKeyField string
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    DataTargetRecordMappings []GetCkafkaDatahubTaskTaskListSourceResourcePostgreSqlParamDataTargetRecordMapping
    Mapping relationship between tables and messages.
    Database string
    Database name.
    DropInvalidMessage bool
    Whether to discard messages that fail to parse; the default is true.
    IsTableRegular bool
    Whether the input table is a regular expression.
    KeyColumns string
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). A table that is not specified defaults to the table's primary key.
    PluginName string
    Plugin name (decoderbufs or pgoutput); the default is decoderbufs.
    RecordWithSchema bool
    If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
    Resource string
    Resource.
    SnapshotMode string
    Snapshot mode: schema_only or initial; the default is initial.
    Table string
    The table to monitor: a non-system table in any of the monitored databases. You can use , (comma) to monitor multiple data tables, but each data table needs to be filled in the format database name.table name.
    dataFormat String
    Upstream data format (JSON|Debezium); required when the database synchronization mode matches the default field.
    dataTargetInsertMode String
    INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
    dataTargetPrimaryKeyField String
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    dataTargetRecordMappings List<GetCkafkaDatahubTaskTaskListSourceResourcePostgreSqlParamDataTargetRecordMapping>
    Mapping relationship between tables and messages.
    database String
    Database name.
    dropInvalidMessage Boolean
    Whether to discard messages that fail to parse; the default is true.
    isTableRegular Boolean
    Whether the input table is a regular expression.
    keyColumns String
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). A table that is not specified defaults to the table's primary key.
    pluginName String
    Plugin name (decoderbufs or pgoutput); the default is decoderbufs.
    recordWithSchema Boolean
    If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
    resource String
    Resource.
    snapshotMode String
    Snapshot mode: schema_only or initial; the default is initial.
    table String
    The table to monitor: a non-system table in any of the monitored databases. You can use , (comma) to monitor multiple data tables, but each data table needs to be filled in the format database name.table name.
    dataFormat string
    Upstream data format (JSON|Debezium); required when the database synchronization mode matches the default field.
    dataTargetInsertMode string
    INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
    dataTargetPrimaryKeyField string
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    dataTargetRecordMappings GetCkafkaDatahubTaskTaskListSourceResourcePostgreSqlParamDataTargetRecordMapping[]
    Mapping relationship between tables and messages.
    database string
    Database name.
    dropInvalidMessage boolean
    Whether to discard messages that fail to parse; the default is true.
    isTableRegular boolean
    Whether the input table is a regular expression.
    keyColumns string
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). A table that is not specified defaults to the table's primary key.
    pluginName string
    Plugin name (decoderbufs or pgoutput); the default is decoderbufs.
    recordWithSchema boolean
    If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
    resource string
    Resource.
    snapshotMode string
    Snapshot mode: schema_only or initial; the default is initial.
    table string
    The table to monitor: a non-system table in any of the monitored databases. You can use , (comma) to monitor multiple data tables, but each data table needs to be filled in the format database name.table name.
    data_format str
    Upstream data format (JSON|Debezium); required when the database synchronization mode matches the default field.
    data_target_insert_mode str
    INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
    data_target_primary_key_field str
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    data_target_record_mappings Sequence[GetCkafkaDatahubTaskTaskListSourceResourcePostgreSqlParamDataTargetRecordMapping]
    Mapping relationship between tables and messages.
    database str
    Database name.
    drop_invalid_message bool
    Whether to discard messages that fail to parse; the default is true.
    is_table_regular bool
    Whether the input table is a regular expression.
    key_columns str
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). A table that is not specified defaults to the table's primary key.
    plugin_name str
    Plugin name (decoderbufs or pgoutput); the default is decoderbufs.
    record_with_schema bool
    If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
    resource str
    Resource.
    snapshot_mode str
    Snapshot mode: schema_only or initial; the default is initial.
    table str
    The table to monitor: a non-system table in any of the monitored databases. You can use , (comma) to monitor multiple data tables, but each data table needs to be filled in the format database name.table name.
    dataFormat String
    Upstream data format (JSON|Debezium); required when the database synchronization mode matches the default field.
    dataTargetInsertMode String
    INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
    dataTargetPrimaryKeyField String
    When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
    dataTargetRecordMappings List<Property Map>
    Mapping relationship between tables and messages.
    database String
    Database name.
    dropInvalidMessage Boolean
    Whether to discard messages that fail to parse; the default is true.
    isTableRegular Boolean
    Whether the input table is a regular expression.
    keyColumns String
    Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). A table that is not specified defaults to the table's primary key.
    pluginName String
    Plugin name (decoderbufs or pgoutput); the default is decoderbufs.
    recordWithSchema Boolean
    If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
    resource String
    Resource.
    snapshotMode String
    Snapshot mode: schema_only or initial; the default is initial.
    table String
    The table to monitor: a non-system table in any of the monitored databases. You can use , (comma) to monitor multiple data tables, but each data table needs to be filled in the format database name.table name.

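    A minimal sketch of the documented PostgreSQL defaults, assuming only the field names listed above: pluginName falls back to decoderbufs and snapshotMode to initial when unset. This is a hypothetical helper, not provider code.

    // Applies the defaults stated in the field descriptions above.
    function withPostgresDefaults(p: { pluginName?: string; snapshotMode?: string }) {
        return {
            pluginName: p.pluginName ?? "decoderbufs",  // decoderbufs | pgoutput
            snapshotMode: p.snapshotMode ?? "initial",  // schema_only | initial
        };
    }
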
    GetCkafkaDatahubTaskTaskListSourceResourcePostgreSqlParamDataTargetRecordMapping

    AllowNull bool
    Whether the message is allowed to be empty.
    AutoIncrement bool
    Whether it is an auto-increment column.
    ColumnName string
    Column Name.
    ColumnSize string
    Current column size.
    DecimalDigits string
    Current column decimal digits.
    DefaultValue string
    Default value of the database table column.
    ExtraInfo string
    Extra information of the database table column.
    JsonKey string
    The key name of the message.
    Type string
    Column type.
    AllowNull bool
    Whether the message is allowed to be empty.
    AutoIncrement bool
    Whether it is an auto-increment column.
    ColumnName string
    Column Name.
    ColumnSize string
    Current column size.
    DecimalDigits string
    Current column decimal digits.
    DefaultValue string
    Default value of the database table column.
    ExtraInfo string
    Extra information of the database table column.
    JsonKey string
    The key name of the message.
    Type string
    Column type.
    allowNull Boolean
    Whether the message is allowed to be empty.
    autoIncrement Boolean
    Whether it is an auto-increment column.
    columnName String
    Column Name.
    columnSize String
    Current column size.
    decimalDigits String
    Current column decimal digits.
    defaultValue String
    Default value of the database table column.
    extraInfo String
    Extra information of the database table column.
    jsonKey String
    The key name of the message.
    type String
    Column type.
    allowNull boolean
    Whether the message is allowed to be empty.
    autoIncrement boolean
    Whether it is an auto-increment column.
    columnName string
    Column Name.
    columnSize string
    Current column size.
    decimalDigits string
    Current column decimal digits.
    defaultValue string
    Default value of the database table column.
    extraInfo string
    Extra information of the database table column.
    jsonKey string
    The key name of the message.
    type string
    Column type.
    allow_null bool
    Whether the message is allowed to be empty.
    auto_increment bool
    Whether it is an auto-increment column.
    column_name str
    Column Name.
    column_size str
    Current column size.
    decimal_digits str
    Current column decimal digits.
    default_value str
    Default value of the database table column.
    extra_info str
    Extra information of the database table column.
    json_key str
    The key name of the message.
    type str
    Column type.
    allowNull Boolean
    Whether the message is allowed to be empty.
    autoIncrement Boolean
    Whether it is an auto-increment column.
    columnName String
    Column Name.
    columnSize String
    Current column size.
    decimalDigits String
    Current column decimal digits.
    defaultValue String
    Default value of the database table column.
    extraInfo String
    Extra information of the database table column.
    jsonKey String
    The key name of the message.
    type String
    Column type.

    GetCkafkaDatahubTaskTaskListSourceResourceScfParam

    BatchSize double
    The maximum number of messages sent in each batch; the default is 1000.
    FunctionName string
    SCF function name.
    MaxRetries double
    The number of retries after the SCF call fails; the default is 5.
    Namespace string
    SCF cloud function namespace; the default is default.
    Qualifier string
    SCF cloud function version and alias; the default is DEFAULT.
    BatchSize float64
    The maximum number of messages sent in each batch; the default is 1000.
    FunctionName string
    SCF function name.
    MaxRetries float64
    The number of retries after the SCF call fails; the default is 5.
    Namespace string
    SCF cloud function namespace; the default is default.
    Qualifier string
    SCF cloud function version and alias; the default is DEFAULT.
    batchSize Double
    The maximum number of messages sent in each batch; the default is 1000.
    functionName String
    SCF function name.
    maxRetries Double
    The number of retries after the SCF call fails; the default is 5.
    namespace String
    SCF cloud function namespace; the default is default.
    qualifier String
    SCF cloud function version and alias; the default is DEFAULT.
    batchSize number
    The maximum number of messages sent in each batch; the default is 1000.
    functionName string
    SCF function name.
    maxRetries number
    The number of retries after the SCF call fails; the default is 5.
    namespace string
    SCF cloud function namespace; the default is default.
    qualifier string
    SCF cloud function version and alias; the default is DEFAULT.
    batch_size float
    The maximum number of messages sent in each batch; the default is 1000.
    function_name str
    SCF function name.
    max_retries float
    The number of retries after the SCF call fails; the default is 5.
    namespace str
    SCF cloud function namespace; the default is default.
    qualifier str
    SCF cloud function version and alias; the default is DEFAULT.
    batchSize Number
    The maximum number of messages sent in each batch; the default is 1000.
    functionName String
    SCF function name.
    maxRetries Number
    The number of retries after the SCF call fails; the default is 5.
    namespace String
    SCF cloud function namespace; the default is default.
    qualifier String
    SCF cloud function version and alias; the default is DEFAULT.

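    The SCF delivery defaults above (batch size 1000, 5 retries, namespace default, qualifier DEFAULT) can be expressed as a small normalization helper. The ScfParam shape here is a local stand-in mirroring the documented fields, not an SDK import.

    interface ScfParam {
        functionName: string;
        batchSize?: number;  // default 1000
        maxRetries?: number; // default 5
        namespace?: string;  // default "default"
        qualifier?: string;  // default "DEFAULT"
    }

    function withScfDefaults(p: ScfParam): Required<ScfParam> {
        return {
            functionName: p.functionName,
            batchSize: p.batchSize ?? 1000,
            maxRetries: p.maxRetries ?? 5,
            namespace: p.namespace ?? "default",
            qualifier: p.qualifier ?? "DEFAULT",
        };
    }
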
    GetCkafkaDatahubTaskTaskListSourceResourceSqlServerParam

    Database string
    SQLServer database name.
    Resource string
    Resource.
    SnapshotMode string
    Snapshot mode: schema_only or initial; the default is initial.
    Table string
    SQLServer table: a non-system table in any of the monitored databases. You can use , (comma) to monitor multiple data tables, but each data table needs to be filled in the format database name.table name.
    Database string
    SQLServer database name.
    Resource string
    Resource.
    SnapshotMode string
    Snapshot mode: schema_only or initial; the default is initial.
    Table string
    SQLServer table: a non-system table in any of the monitored databases. You can use , (comma) to monitor multiple data tables, but each data table needs to be filled in the format database name.table name.
    database String
    SQLServer database name.
    resource String
    Resource.
    snapshotMode String
    Snapshot mode: schema_only or initial; the default is initial.
    table String
    SQLServer table: a non-system table in any of the monitored databases. You can use , (comma) to monitor multiple data tables, but each data table needs to be filled in the format database name.table name.
    database string
    SQLServer database name.
    resource string
    Resource.
    snapshotMode string
    Snapshot mode: schema_only or initial; the default is initial.
    table string
    SQLServer table: a non-system table in any of the monitored databases. You can use , (comma) to monitor multiple data tables, but each data table needs to be filled in the format database name.table name.
    database str
    SQLServer database name.
    resource str
    Resource.
    snapshot_mode str
    Snapshot mode: schema_only or initial; the default is initial.
    table str
    SQLServer table: a non-system table in any of the monitored databases. You can use , (comma) to monitor multiple data tables, but each data table needs to be filled in the format database name.table name.
    database String
    SQLServer database name.
    resource String
    Resource.
    snapshotMode String
    Snapshot mode: schema_only or initial; the default is initial.
    table String
    SQLServer table: a non-system table in any of the monitored databases. You can use , (comma) to monitor multiple data tables, but each data table needs to be filled in the format database name.table name.

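    Since the Table field above packs multiple tables into one comma-separated string of database name.table name entries, a tiny parser makes the format explicit. This is an illustrative helper, not part of the provider.

    function parseTableSpec(table: string): { database: string; table: string }[] {
        return table.split(",").map(entry => {
            // Each entry is expected in "database.table" form.
            const [db, tbl] = entry.trim().split(".");
            return { database: db, table: tbl };
        });
    }

    // parseTableSpec("db1.orders,db1.users")
    //   -> [{ database: "db1", table: "orders" }, { database: "db1", table: "users" }]
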
    GetCkafkaDatahubTaskTaskListSourceResourceTdwParam

    Bid string
    Tdw bid.
    IsDomestic bool
    Default true.
    TdwHost string
    TDW address; the default is tl-tdbank-tdmanager.tencent-distribute.com.
    TdwPort double
    TDW port; the default is 8099.
    Tid string
    Tdw tid.
    Bid string
    Tdw bid.
    IsDomestic bool
    Default true.
    TdwHost string
    TDW address; the default is tl-tdbank-tdmanager.tencent-distribute.com.
    TdwPort float64
    TDW port; the default is 8099.
    Tid string
    Tdw tid.
    bid String
    Tdw bid.
    isDomestic Boolean
    Default true.
    tdwHost String
    TDW address; the default is tl-tdbank-tdmanager.tencent-distribute.com.
    tdwPort Double
    TDW port; the default is 8099.
    tid String
    Tdw tid.
    bid string
    Tdw bid.
    isDomestic boolean
    Default true.
    tdwHost string
    TDW address; the default is tl-tdbank-tdmanager.tencent-distribute.com.
    tdwPort number
    TDW port; the default is 8099.
    tid string
    Tdw tid.
    bid str
    Tdw bid.
    is_domestic bool
    Default true.
    tdw_host str
    TDW address; the default is tl-tdbank-tdmanager.tencent-distribute.com.
    tdw_port float
    TDW port; the default is 8099.
    tid str
    Tdw tid.
    bid String
    Tdw bid.
    isDomestic Boolean
    Default true.
    tdwHost String
    TDW address; the default is tl-tdbank-tdmanager.tencent-distribute.com.
    tdwPort Number
    TDW port; the default is 8099.
    tid String
    Tdw tid.

    GetCkafkaDatahubTaskTaskListSourceResourceTopicParam

    CompressionType string
    Whether to compress when writing to the topic: fill in none to disable compression, or one of gzip, snappy, or lz4 to enable it.
    MsgMultiple double
    Each source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable when CKafka flows into CKafka).
    OffsetType string
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    Resource string
    Resource.
    StartTime double
    Must be passed when the offset type is timestamp; the value is a Unix timestamp accurate to the second.
    TopicId string
    Topic ID.
    UseAutoCreateTopic bool
    Whether the topic in use needs to be created automatically (currently only supported for SOURCE inflow tasks).
    CompressionType string
    Whether to compress when writing to the topic: fill in none to disable compression, or one of gzip, snappy, or lz4 to enable it.
    MsgMultiple float64
    Each source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable when CKafka flows into CKafka).
    OffsetType string
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    Resource string
    Resource.
    StartTime float64
    Must be passed when the offset type is timestamp; the value is a Unix timestamp accurate to the second.
    TopicId string
    Topic ID.
    UseAutoCreateTopic bool
    Whether the topic in use needs to be created automatically (currently only supported for SOURCE inflow tasks).
    compressionType String
    Whether to compress when writing to the topic: fill in none to disable compression, or one of gzip, snappy, or lz4 to enable it.
    msgMultiple Double
    Each source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable when CKafka flows into CKafka).
    offsetType String
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    resource String
    Resource.
    startTime Double
    Must be passed when the offset type is timestamp; the value is a Unix timestamp accurate to the second.
    topicId String
    Topic ID.
    useAutoCreateTopic Boolean
    Whether the topic in use needs to be created automatically (currently only supported for SOURCE inflow tasks).
    compressionType string
    Whether to compress when writing to the topic: fill in none to disable compression, or one of gzip, snappy, or lz4 to enable it.
    msgMultiple number
    Each source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable when CKafka flows into CKafka).
    offsetType string
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    resource string
    Resource.
    startTime number
    Must be passed when the offset type is timestamp; the value is a Unix timestamp accurate to the second.
    topicId string
    Topic ID.
    useAutoCreateTopic boolean
    Whether the topic in use needs to be created automatically (currently only supported for SOURCE inflow tasks).
    compression_type str
    Whether to compress when writing to the topic: fill in none to disable compression, or one of gzip, snappy, or lz4 to enable it.
    msg_multiple float
    Each source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable when CKafka flows into CKafka).
    offset_type str
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    resource str
    Resource.
    start_time float
    Must be passed when the offset type is timestamp; the value is a Unix timestamp accurate to the second.
    topic_id str
    Topic ID.
    use_auto_create_topic bool
    Whether the topic in use needs to be created automatically (currently only supported for SOURCE inflow tasks).
    compressionType String
    Whether to compress when writing to the topic: fill in none to disable compression, or one of gzip, snappy, or lz4 to enable it.
    msgMultiple Number
    Each source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable when CKafka flows into CKafka).
    offsetType String
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    resource String
    Resource.
    startTime Number
    Must be passed when the offset type is timestamp; the value is a Unix timestamp accurate to the second.
    topicId String
    Topic ID.
    useAutoCreateTopic Boolean
    Whether the topic in use needs to be created automatically (currently only supported for SOURCE inflow tasks).

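    One detail worth underlining from the fields above: when the offset type is timestamp, startTime must be a second-precision Unix timestamp. The sketch below builds such a value; the literal resource ID is a placeholder, not a real instance.

    // Seconds, not milliseconds: Date.now() returns ms, so divide by 1000.
    const startTime = Math.floor(Date.now() / 1000);

    const topicParam = {
        resource: "ckafka-xxxxxxxx", // placeholder instance ID
        offsetType: "timestamp",     // earliest | latest | timestamp
        startTime,                   // required only for offsetType "timestamp"
        msgMultiple: 1,              // write each source message once
    };
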
    GetCkafkaDatahubTaskTaskListTargetResource

    ClickHouseParams List<GetCkafkaDatahubTaskTaskListTargetResourceClickHouseParam>
    ClickHouse configuration, Required when Type is CLICKHOUSE.
    ClsParams List<GetCkafkaDatahubTaskTaskListTargetResourceClsParam>
    Cls configuration, Required when Type is CLS.
    CosParams List<GetCkafkaDatahubTaskTaskListTargetResourceCosParam>
    Cos configuration, required when Type is COS.
    CtsdbParams List<GetCkafkaDatahubTaskTaskListTargetResourceCtsdbParam>
    Ctsdb configuration, Required when Type is CTSDB.
    DtsParams List<GetCkafkaDatahubTaskTaskListTargetResourceDtsParam>
    Dts configuration, required when Type is DTS.
    EsParams List<GetCkafkaDatahubTaskTaskListTargetResourceEsParam>
    Es configuration, required when Type is ES.
    EventBusParams List<GetCkafkaDatahubTaskTaskListTargetResourceEventBusParam>
    EB configuration, required when type is EB.
    KafkaParams List<GetCkafkaDatahubTaskTaskListTargetResourceKafkaParam>
    ckafka configuration, required when Type is KAFKA.
    MariaDbParams List<GetCkafkaDatahubTaskTaskListTargetResourceMariaDbParam>
    MariaDB configuration, Required when Type is MARIADB.
    MongoDbParams List<GetCkafkaDatahubTaskTaskListTargetResourceMongoDbParam>
    MongoDB config, Required when Type is MONGODB.
    MySqlParams List<GetCkafkaDatahubTaskTaskListTargetResourceMySqlParam>
    MySQL configuration, Required when Type is MYSQL.
    PostgreSqlParams List<GetCkafkaDatahubTaskTaskListTargetResourcePostgreSqlParam>
    PostgreSQL configuration, Required when Type is POSTGRESQL or TDSQL_C_POSTGRESQL.
    ScfParams List<GetCkafkaDatahubTaskTaskListTargetResourceScfParam>
    Scf configuration, Required when Type is SCF.
    SqlServerParams List<GetCkafkaDatahubTaskTaskListTargetResourceSqlServerParam>
    SQLServer configuration, Required when Type is SQLSERVER.
    TdwParams List<GetCkafkaDatahubTaskTaskListTargetResourceTdwParam>
    Tdw configuration, required when Type is TDW.
    TopicParams List<GetCkafkaDatahubTaskTaskListTargetResourceTopicParam>
    Topic configuration, Required when Type is Topic.
    Type string
    Resource Type.
    ClickHouseParams []GetCkafkaDatahubTaskTaskListTargetResourceClickHouseParam
    ClickHouse configuration, Required when Type is CLICKHOUSE.
    ClsParams []GetCkafkaDatahubTaskTaskListTargetResourceClsParam
    Cls configuration, Required when Type is CLS.
    CosParams []GetCkafkaDatahubTaskTaskListTargetResourceCosParam
    Cos configuration, required when Type is COS.
    CtsdbParams []GetCkafkaDatahubTaskTaskListTargetResourceCtsdbParam
    Ctsdb configuration, Required when Type is CTSDB.
    DtsParams []GetCkafkaDatahubTaskTaskListTargetResourceDtsParam
    Dts configuration, required when Type is DTS.
    EsParams []GetCkafkaDatahubTaskTaskListTargetResourceEsParam
    Es configuration, required when Type is ES.
    EventBusParams []GetCkafkaDatahubTaskTaskListTargetResourceEventBusParam
    EB configuration, required when type is EB.
    KafkaParams []GetCkafkaDatahubTaskTaskListTargetResourceKafkaParam
    ckafka configuration, required when Type is KAFKA.
    MariaDbParams []GetCkafkaDatahubTaskTaskListTargetResourceMariaDbParam
    MariaDB configuration, Required when Type is MARIADB.
    MongoDbParams []GetCkafkaDatahubTaskTaskListTargetResourceMongoDbParam
    MongoDB config, Required when Type is MONGODB.
    MySqlParams []GetCkafkaDatahubTaskTaskListTargetResourceMySqlParam
    MySQL configuration, Required when Type is MYSQL.
    PostgreSqlParams []GetCkafkaDatahubTaskTaskListTargetResourcePostgreSqlParam
    PostgreSQL configuration, Required when Type is POSTGRESQL or TDSQL_C_POSTGRESQL.
    ScfParams []GetCkafkaDatahubTaskTaskListTargetResourceScfParam
    Scf configuration, Required when Type is SCF.
    SqlServerParams []GetCkafkaDatahubTaskTaskListTargetResourceSqlServerParam
    SQLServer configuration, Required when Type is SQLSERVER.
    TdwParams []GetCkafkaDatahubTaskTaskListTargetResourceTdwParam
    Tdw configuration, required when Type is TDW.
    TopicParams []GetCkafkaDatahubTaskTaskListTargetResourceTopicParam
    Topic configuration, Required when Type is Topic.
    Type string
    Resource Type.
    clickHouseParams List<GetCkafkaDatahubTaskTaskListTargetResourceClickHouseParam>
    ClickHouse configuration, Required when Type is CLICKHOUSE.
    clsParams List<GetCkafkaDatahubTaskTaskListTargetResourceClsParam>
    Cls configuration, Required when Type is CLS.
    cosParams List<GetCkafkaDatahubTaskTaskListTargetResourceCosParam>
    Cos configuration, required when Type is COS.
    ctsdbParams List<GetCkafkaDatahubTaskTaskListTargetResourceCtsdbParam>
    Ctsdb configuration, Required when Type is CTSDB.
    dtsParams List<GetCkafkaDatahubTaskTaskListTargetResourceDtsParam>
    Dts configuration, required when Type is DTS.
    esParams List<GetCkafkaDatahubTaskTaskListTargetResourceEsParam>
    Es configuration, required when Type is ES.
    eventBusParams List<GetCkafkaDatahubTaskTaskListTargetResourceEventBusParam>
    EB configuration, required when type is EB.
    kafkaParams List<GetCkafkaDatahubTaskTaskListTargetResourceKafkaParam>
    ckafka configuration, required when Type is KAFKA.
    mariaDbParams List<GetCkafkaDatahubTaskTaskListTargetResourceMariaDbParam>
    MariaDB configuration, Required when Type is MARIADB.
    mongoDbParams List<GetCkafkaDatahubTaskTaskListTargetResourceMongoDbParam>
    MongoDB config, Required when Type is MONGODB.
    mySqlParams List<GetCkafkaDatahubTaskTaskListTargetResourceMySqlParam>
    MySQL configuration, Required when Type is MYSQL.
    postgreSqlParams List<GetCkafkaDatahubTaskTaskListTargetResourcePostgreSqlParam>
    PostgreSQL configuration, Required when Type is POSTGRESQL or TDSQL_C_POSTGRESQL.
    scfParams List<GetCkafkaDatahubTaskTaskListTargetResourceScfParam>
    Scf configuration, Required when Type is SCF.
    sqlServerParams List<GetCkafkaDatahubTaskTaskListTargetResourceSqlServerParam>
    SQLServer configuration, Required when Type is SQLSERVER.
    tdwParams List<GetCkafkaDatahubTaskTaskListTargetResourceTdwParam>
    Tdw configuration, required when Type is TDW.
    topicParams List<GetCkafkaDatahubTaskTaskListTargetResourceTopicParam>
    Topic configuration, Required when Type is Topic.
    type String
    Resource Type.
    clickHouseParams GetCkafkaDatahubTaskTaskListTargetResourceClickHouseParam[]
    ClickHouse configuration, Required when Type is CLICKHOUSE.
    clsParams GetCkafkaDatahubTaskTaskListTargetResourceClsParam[]
    Cls configuration, Required when Type is CLS.
    cosParams GetCkafkaDatahubTaskTaskListTargetResourceCosParam[]
    Cos configuration, required when Type is COS.
    ctsdbParams GetCkafkaDatahubTaskTaskListTargetResourceCtsdbParam[]
    Ctsdb configuration, Required when Type is CTSDB.
    dtsParams GetCkafkaDatahubTaskTaskListTargetResourceDtsParam[]
    Dts configuration, required when Type is DTS.
    esParams GetCkafkaDatahubTaskTaskListTargetResourceEsParam[]
    Es configuration, required when Type is ES.
    eventBusParams GetCkafkaDatahubTaskTaskListTargetResourceEventBusParam[]
    EB configuration, required when type is EB.
    kafkaParams GetCkafkaDatahubTaskTaskListTargetResourceKafkaParam[]
    ckafka configuration, required when Type is KAFKA.
    mariaDbParams GetCkafkaDatahubTaskTaskListTargetResourceMariaDbParam[]
    MariaDB configuration, Required when Type is MARIADB.
    mongoDbParams GetCkafkaDatahubTaskTaskListTargetResourceMongoDbParam[]
    MongoDB config, Required when Type is MONGODB.
    mySqlParams GetCkafkaDatahubTaskTaskListTargetResourceMySqlParam[]
    MySQL configuration, Required when Type is MYSQL.
    postgreSqlParams GetCkafkaDatahubTaskTaskListTargetResourcePostgreSqlParam[]
    PostgreSQL configuration, Required when Type is POSTGRESQL or TDSQL_C_POSTGRESQL.
    scfParams GetCkafkaDatahubTaskTaskListTargetResourceScfParam[]
    Scf configuration, Required when Type is SCF.
    sqlServerParams GetCkafkaDatahubTaskTaskListTargetResourceSqlServerParam[]
    SQLServer configuration, Required when Type is SQLSERVER.
    tdwParams GetCkafkaDatahubTaskTaskListTargetResourceTdwParam[]
    Tdw configuration, required when Type is TDW.
    topicParams GetCkafkaDatahubTaskTaskListTargetResourceTopicParam[]
    Topic configuration, Required when Type is Topic.
    type string
    Resource Type.
    click_house_params Sequence[GetCkafkaDatahubTaskTaskListTargetResourceClickHouseParam]
    ClickHouse configuration, Required when Type is CLICKHOUSE.
    cls_params Sequence[GetCkafkaDatahubTaskTaskListTargetResourceClsParam]
    Cls configuration, Required when Type is CLS.
    cos_params Sequence[GetCkafkaDatahubTaskTaskListTargetResourceCosParam]
    Cos configuration, required when Type is COS.
    ctsdb_params Sequence[GetCkafkaDatahubTaskTaskListTargetResourceCtsdbParam]
    Ctsdb configuration, Required when Type is CTSDB.
    dts_params Sequence[GetCkafkaDatahubTaskTaskListTargetResourceDtsParam]
    Dts configuration, required when Type is DTS.
    es_params Sequence[GetCkafkaDatahubTaskTaskListTargetResourceEsParam]
    Es configuration, required when Type is ES.
    event_bus_params Sequence[GetCkafkaDatahubTaskTaskListTargetResourceEventBusParam]
    EB configuration, required when type is EB.
    kafka_params Sequence[GetCkafkaDatahubTaskTaskListTargetResourceKafkaParam]
    ckafka configuration, required when Type is KAFKA.
    maria_db_params Sequence[GetCkafkaDatahubTaskTaskListTargetResourceMariaDbParam]
    MariaDB configuration, Required when Type is MARIADB.
    mongo_db_params Sequence[GetCkafkaDatahubTaskTaskListTargetResourceMongoDbParam]
    MongoDB config, Required when Type is MONGODB.
    my_sql_params Sequence[GetCkafkaDatahubTaskTaskListTargetResourceMySqlParam]
    MySQL configuration, Required when Type is MYSQL.
    postgre_sql_params Sequence[GetCkafkaDatahubTaskTaskListTargetResourcePostgreSqlParam]
    PostgreSQL configuration, Required when Type is POSTGRESQL or TDSQL_C_POSTGRESQL.
    scf_params Sequence[GetCkafkaDatahubTaskTaskListTargetResourceScfParam]
    Scf configuration, Required when Type is SCF.
    sql_server_params Sequence[GetCkafkaDatahubTaskTaskListTargetResourceSqlServerParam]
    SQLServer configuration, Required when Type is SQLSERVER.
    tdw_params Sequence[GetCkafkaDatahubTaskTaskListTargetResourceTdwParam]
    Tdw configuration, required when Type is TDW.
    topic_params Sequence[GetCkafkaDatahubTaskTaskListTargetResourceTopicParam]
    Topic configuration, Required when Type is Topic.
    type str
    Resource Type.
    clickHouseParams List<Property Map>
    ClickHouse configuration, Required when Type is CLICKHOUSE.
    clsParams List<Property Map>
    Cls configuration, Required when Type is CLS.
    cosParams List<Property Map>
    Cos configuration, required when Type is COS.
    ctsdbParams List<Property Map>
    Ctsdb configuration, Required when Type is CTSDB.
    dtsParams List<Property Map>
    Dts configuration, required when Type is DTS.
    esParams List<Property Map>
    Es configuration, required when Type is ES.
    eventBusParams List<Property Map>
    EB configuration, required when type is EB.
    kafkaParams List<Property Map>
    ckafka configuration, required when Type is KAFKA.
    mariaDbParams List<Property Map>
    MariaDB configuration, Required when Type is MARIADB.
    mongoDbParams List<Property Map>
    MongoDB config, Required when Type is MONGODB.
    mySqlParams List<Property Map>
    MySQL configuration, Required when Type is MYSQL.
    postgreSqlParams List<Property Map>
    PostgreSQL configuration, Required when Type is POSTGRESQL or TDSQL_C_POSTGRESQL.
    scfParams List<Property Map>
    Scf configuration, Required when Type is SCF.
    sqlServerParams List<Property Map>
    SQLServer configuration, Required when Type is SQLSERVER.
    tdwParams List<Property Map>
    Tdw configuration, required when Type is TDW.
    topicParams List<Property Map>
    Topic configuration, Required when Type is Topic.
    type String
    Resource Type.

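    Because each *Params list above is only populated when Type matches (for example KafkaParams when Type is KAFKA), a consumer typically dispatches on type to find the active block. The property names in this sketch follow the schema above, and the exact case of the Type values is an assumption.

    // Hypothetical dispatcher over a target resource object.
    function activeTargetParams(target: any): unknown {
        switch (target.type) {
            case "CLICKHOUSE": return target.clickHouseParams;
            case "CLS":        return target.clsParams;
            case "COS":        return target.cosParams;
            case "MYSQL":      return target.mySqlParams;
            case "SCF":        return target.scfParams;
            case "TOPIC":      return target.topicParams;
            // ...the remaining types follow the same naming convention.
            default:           return undefined;
        }
    }
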
    GetCkafkaDatahubTaskTaskListTargetResourceClickHouseParam

    Cluster string
    ClickHouse cluster.
    Database string
    Database name.
    DropCls List<GetCkafkaDatahubTaskTaskListTargetResourceClickHouseParamDropCl>
    When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
    DropInvalidMessage bool
    Whether to discard messages that fail to parse; the default is true.
    Ip string
    Connection IP.
    Password string
    Database password.
    Port double
    Connection port.
    Resource string
    Resource.
    Schemas List<GetCkafkaDatahubTaskTaskListTargetResourceClickHouseParamSchema>
    ClickHouse schema.
    SelfBuilt bool
    Whether it is a self-built cluster.
    ServiceVip string
    Instance VIP.
    Table string
    Table name. You can use , (comma) to specify multiple data tables, each filled in the format database name.table name.
    Type string
    Resource Type.
    UniqVpcId string
    Instance VPC ID.
    UserName string
    Database user name.
    Cluster string
    ClickHouse cluster.
    Database string
    Database name.
    DropCls []GetCkafkaDatahubTaskTaskListTargetResourceClickHouseParamDropCl
    When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
    DropInvalidMessage bool
    Whether to discard messages that fail to parse; the default is true.
    Ip string
    Connection IP.
    Password string
    Database password.
    Port float64
    Connection port.
    Resource string
    Resource.
    Schemas []GetCkafkaDatahubTaskTaskListTargetResourceClickHouseParamSchema
    ClickHouse schema.
    SelfBuilt bool
    Whether it is a self-built cluster.
    ServiceVip string
    Instance VIP.
    Table string
    Table name. You can use , (comma) to specify multiple data tables, each filled in the format database name.table name.
    Type string
    Resource Type.
    UniqVpcId string
    Instance VPC ID.
    UserName string
    Database user name.
    cluster String
    ClickHouse cluster.
    database String
    Database name.
    dropCls List<GetCkafkaDatahubTaskTaskListTargetResourceClickHouseParamDropCl>
    When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
    dropInvalidMessage Boolean
    Whether to discard messages that fail to parse; the default is true.
    ip String
    Connection IP.
    password String
    Database password.
    port Double
    Connection port.
    resource String
    Resource.
    schemas List<GetCkafkaDatahubTaskTaskListTargetResourceClickHouseParamSchema>
    ClickHouse schema.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    serviceVip String
    Instance VIP.
    table String
    Table name. You can use , (comma) to specify multiple data tables, each filled in the format database name.table name.
    type String
    Resource Type.
    uniqVpcId String
    Instance VPC ID.
    userName String
    Database user name.
    cluster string
    ClickHouse cluster.
    database string
    Database name.
    dropCls GetCkafkaDatahubTaskTaskListTargetResourceClickHouseParamDropCl[]
    When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
    dropInvalidMessage boolean
    Whether to discard messages that fail to parse; the default is true.
    ip string
    Connection IP.
    password string
    Database password.
    port number
    Connection port.
    resource string
    Resource.
    schemas GetCkafkaDatahubTaskTaskListTargetResourceClickHouseParamSchema[]
    ClickHouse schema.
    selfBuilt boolean
    Whether it is a self-built cluster.
    serviceVip string
    Instance VIP.
    table string
    Table name. You can use , (comma) to specify multiple data tables, each filled in the format database name.table name.
    type string
    Resource Type.
    uniqVpcId string
    Instance VPC ID.
    userName string
    Database user name.
    cluster str
    ClickHouse cluster.
    database str
    Database name.
    drop_cls Sequence[GetCkafkaDatahubTaskTaskListTargetResourceClickHouseParamDropCl]
    When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
    drop_invalid_message bool
    Whether to discard messages that fail to parse; the default is true.
    ip str
    Connection IP.
    password str
    Database password.
    port float
    Connection port.
    resource str
    Resource.
    schemas Sequence[GetCkafkaDatahubTaskTaskListTargetResourceClickHouseParamSchema]
    ClickHouse schema.
    self_built bool
    Whether it is a self-built cluster.
    service_vip str
    Instance VIP.
    table str
    Table name. You can use , (comma) to specify multiple data tables, each filled in the format database name.table name.
    type str
    Resource Type.
    uniq_vpc_id str
    Instance VPC ID.
    user_name str
    Database user name.
    cluster String
    ClickHouse cluster.
    database String
    Database name.
    dropCls List<Property Map>
    When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
    dropInvalidMessage Boolean
    Whether to discard messages that fail to parse; the default is true.
    ip String
    Connection IP.
    password String
    Database password.
    port Number
    Connection port.
    resource String
    Resource.
    schemas List<Property Map>
    ClickHouse schema.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    serviceVip String
    Instance VIP.
    table String
    Table name. You can use , (comma) to specify multiple data tables, each filled in the format database name.table name.
    type String
    Resource Type.
    uniqVpcId String
    Instance VPC ID.
    userName String
    Database user name.

    GetCkafkaDatahubTaskTaskListTargetResourceClickHouseParamDropCl

    DropClsLogSet string
    CLS logset ID.
    DropClsOwneruin string
    Account owner UIN.
    DropClsRegion string
    The region where the CLS is delivered.
    DropClsTopicId string
    CLS topic ID.
    DropInvalidMessageToCls bool
    Whether to deliver to CLS.
    DropClsLogSet string
    CLS logset ID.
    DropClsOwneruin string
    Account owner UIN.
    DropClsRegion string
    The region where the CLS is delivered.
    DropClsTopicId string
    CLS topic ID.
    DropInvalidMessageToCls bool
    Whether to deliver to CLS.
    dropClsLogSet String
    CLS logset ID.
    dropClsOwneruin String
    Account owner UIN.
    dropClsRegion String
    The region where the CLS is delivered.
    dropClsTopicId String
    CLS topic ID.
    dropInvalidMessageToCls Boolean
    Whether to deliver to CLS.
    dropClsLogSet string
    CLS logset ID.
    dropClsOwneruin string
    Account owner UIN.
    dropClsRegion string
    The region where the CLS is delivered.
    dropClsTopicId string
    CLS topic ID.
    dropInvalidMessageToCls boolean
    Whether to deliver to CLS.
    drop_cls_log_set str
    CLS logset ID.
    drop_cls_owneruin str
    Account owner UIN.
    drop_cls_region str
    The region where the CLS is delivered.
    drop_cls_topic_id str
    CLS topic ID.
    drop_invalid_message_to_cls bool
    Whether to deliver to CLS.
    dropClsLogSet String
    CLS logset ID.
    dropClsOwneruin String
    Account owner UIN.
    dropClsRegion String
    The region where the CLS is delivered.
    dropClsTopicId String
    CLS topic ID.
    dropInvalidMessageToCls Boolean
    Whether to deliver to CLS.

    GetCkafkaDatahubTaskTaskListTargetResourceClickHouseParamSchema

    AllowNull bool
    Whether the message is allowed to be empty.
    ColumnName string
    Column Name.
    JsonKey string
    The key name of the message.
    Type string
    Column type.
    AllowNull bool
    Whether the message is allowed to be empty.
    ColumnName string
    Column Name.
    JsonKey string
    The key name of the message.
    Type string
    Column type.
    allowNull Boolean
    Whether the message is allowed to be empty.
    columnName String
    Column Name.
    jsonKey String
    The key name of the message.
    type String
    Column type.
    allowNull boolean
    Whether the message is allowed to be empty.
    columnName string
    Column Name.
    jsonKey string
    The key name of the message.
    type string
    Column type.
    allow_null bool
    Whether the message is allowed to be empty.
    column_name str
    Column Name.
    json_key str
    The key name of the message.
    type str
    Column type.
    allowNull Boolean
    Whether the message is allowed to be empty.
    columnName String
    Column Name.
    jsonKey String
    The key name of the message.
    type String
    Column type.

    GetCkafkaDatahubTaskTaskListTargetResourceClsParam

    ContentKey string
    Key for data in non-JSON format.
    DecodeJson bool
    Whether the produced information is in JSON format.
    LogSet string
    Logset ID.
    Resource string
    Resource.
    TimeField string
    Specify a field in the message whose content is used as the time of the CLS log; the field content must be a second-precision timestamp.
    ContentKey string
    Key for data in non-JSON format.
    DecodeJson bool
    Whether the produced information is in JSON format.
    LogSet string
    Logset ID.
    Resource string
    Resource.
    TimeField string
    Specify a field in the message whose content is used as the time of the CLS log; the field content must be a second-precision timestamp.
    contentKey String
    Key for data in non-JSON format.
    decodeJson Boolean
    Whether the produced information is in JSON format.
    logSet String
    Logset ID.
    resource String
    Resource.
    timeField String
    Specify a field in the message whose content is used as the time of the CLS log; the field content must be a second-precision timestamp.
    contentKey string
    key for data in non-json format.
    decodeJson boolean
    Whether the produced information is in json format.
    logSet string
    LogSet id.
    resource string
    Resource.
    timeField string
    Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.
    content_key str
    key for data in non-json format.
    decode_json bool
    Whether the produced information is in json format.
    log_set str
    LogSet id.
    resource str
    Resource.
    time_field str
    Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.
    contentKey String
    key for data in non-json format.
    decodeJson Boolean
    Whether the produced information is in json format.
    logSet String
    LogSet id.
    resource String
    Resource.
    timeField String
    Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.
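
    Because the time field must hold a second-level timestamp, message producers typically divide a millisecond clock by 1000. A hedged TypeScript sketch; the ts field name is hypothetical:

    // Build a message whose "ts" field can serve as the CLS TimeField.
    // Date.now() returns milliseconds, so divide by 1000 to get a
    // second-level timestamp.
    const record = {
        message: "hello",
        ts: Math.floor(Date.now() / 1000),
    };
    console.log(JSON.stringify(record));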

    GetCkafkaDatahubTaskTaskListTargetResourceCosParam

    AggregateBatchSize double
    The size of aggregated messages, in MB.
    AggregateInterval double
    Aggregation time interval.
    BucketName string
    COS bucket name.
    DirectoryTimeFormat string
    Partition format, following strptime time formatting.
    FormatOutputType string
    The file format after message aggregation: csv or json.
    ObjectKey string
    ObjectKey.
    ObjectKeyPrefix string
    Dumped object directory prefix.
    Region string
    Region code.
    AggregateBatchSize float64
    The size of aggregated messages, in MB.
    AggregateInterval float64
    Aggregation time interval.
    BucketName string
    COS bucket name.
    DirectoryTimeFormat string
    Partition format, following strptime time formatting.
    FormatOutputType string
    The file format after message aggregation: csv or json.
    ObjectKey string
    ObjectKey.
    ObjectKeyPrefix string
    Dumped object directory prefix.
    Region string
    Region code.
    aggregateBatchSize Double
    The size of aggregated messages, in MB.
    aggregateInterval Double
    Aggregation time interval.
    bucketName String
    COS bucket name.
    directoryTimeFormat String
    Partition format, following strptime time formatting.
    formatOutputType String
    The file format after message aggregation: csv or json.
    objectKey String
    ObjectKey.
    objectKeyPrefix String
    Dumped object directory prefix.
    region String
    Region code.
    aggregateBatchSize number
    The size of aggregated messages, in MB.
    aggregateInterval number
    Aggregation time interval.
    bucketName string
    COS bucket name.
    directoryTimeFormat string
    Partition format, following strptime time formatting.
    formatOutputType string
    The file format after message aggregation: csv or json.
    objectKey string
    ObjectKey.
    objectKeyPrefix string
    Dumped object directory prefix.
    region string
    Region code.
    aggregate_batch_size float
    The size of aggregated messages, in MB.
    aggregate_interval float
    Aggregation time interval.
    bucket_name str
    COS bucket name.
    directory_time_format str
    Partition format, following strptime time formatting.
    format_output_type str
    The file format after message aggregation: csv or json.
    object_key str
    ObjectKey.
    object_key_prefix str
    Dumped object directory prefix.
    region str
    Region code.
    aggregateBatchSize Number
    The size of aggregated messages, in MB.
    aggregateInterval Number
    Aggregation time interval.
    bucketName String
    COS bucket name.
    directoryTimeFormat String
    Partition format, following strptime time formatting.
    formatOutputType String
    The file format after message aggregation: csv or json.
    objectKey String
    ObjectKey.
    objectKeyPrefix String
    Dumped object directory prefix.
    region String
    Region code.

    GetCkafkaDatahubTaskTaskListTargetResourceCtsdbParam

    CtsdbMetric string
    Ctsdb metric.
    Resource string
    Resource.
    CtsdbMetric string
    Ctsdb metric.
    Resource string
    Resource.
    ctsdbMetric String
    Ctsdb metric.
    resource String
    Resource.
    ctsdbMetric string
    Ctsdb metric.
    resource string
    Resource.
    ctsdb_metric str
    Ctsdb metric.
    resource str
    Resource.
    ctsdbMetric String
    Ctsdb metric.
    resource String
    Resource.

    GetCkafkaDatahubTaskTaskListTargetResourceDtsParam

    GroupId string
    DTS consumer group ID.
    GroupPassword string
    DTS consumer group password.
    GroupUser string
    DTS account.
    Ip string
    MongoDB connection IP.
    Port double
    MongoDB connection port.
    Resource string
    Resource.
    Topic string
    Topic name; separate multiple topics with commas.
    TranSql bool
    False to synchronize the original data; true to synchronize the parsed JSON-format data. Defaults to true.
    GroupId string
    DTS consumer group ID.
    GroupPassword string
    DTS consumer group password.
    GroupUser string
    DTS account.
    Ip string
    MongoDB connection IP.
    Port float64
    MongoDB connection port.
    Resource string
    Resource.
    Topic string
    Topic name; separate multiple topics with commas.
    TranSql bool
    False to synchronize the original data; true to synchronize the parsed JSON-format data. Defaults to true.
    groupId String
    DTS consumer group ID.
    groupPassword String
    DTS consumer group password.
    groupUser String
    DTS account.
    ip String
    MongoDB connection IP.
    port Double
    MongoDB connection port.
    resource String
    Resource.
    topic String
    Topic name; separate multiple topics with commas.
    tranSql Boolean
    False to synchronize the original data; true to synchronize the parsed JSON-format data. Defaults to true.
    groupId string
    DTS consumer group ID.
    groupPassword string
    DTS consumer group password.
    groupUser string
    DTS account.
    ip string
    MongoDB connection IP.
    port number
    MongoDB connection port.
    resource string
    Resource.
    topic string
    Topic name; separate multiple topics with commas.
    tranSql boolean
    False to synchronize the original data; true to synchronize the parsed JSON-format data. Defaults to true.
    group_id str
    DTS consumer group ID.
    group_password str
    DTS consumer group password.
    group_user str
    DTS account.
    ip str
    MongoDB connection IP.
    port float
    MongoDB connection port.
    resource str
    Resource.
    topic str
    Topic name; separate multiple topics with commas.
    tran_sql bool
    False to synchronize the original data; true to synchronize the parsed JSON-format data. Defaults to true.
    groupId String
    DTS consumer group ID.
    groupPassword String
    DTS consumer group password.
    groupUser String
    DTS account.
    ip String
    MongoDB connection IP.
    port Number
    MongoDB connection port.
    resource String
    Resource.
    topic String
    Topic name; separate multiple topics with commas.
    tranSql Boolean
    False to synchronize the original data; true to synchronize the parsed JSON-format data. Defaults to true.
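
    Since Topic carries a comma-separated list when more than one topic is involved, code that consumes this data source may need to split it. A small TypeScript sketch:

    // Split a comma-separated topic list into individual topic names.
    function splitTopics(topic: string): string[] {
        return topic
            .split(",")
            .map(t => t.trim())
            .filter(t => t.length > 0);
    }

    // splitTopics("orders,payments") returns ["orders", "payments"].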

    GetCkafkaDatahubTaskTaskListTargetResourceEsParam

    ContentKey string
    Key for data in non-JSON format.
    DatabasePrimaryKey string
    When the message dumped to ES is a database binlog, fill in the primary key of the database table if you need to synchronize insert, delete, and update operations to ES.
    DateFormat string
    ES date suffix.
    DocumentIdField string
    The field name of the document ID value dumped into ES.
    DropCls List<GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropCl>
    When DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
    DropDlqs List<GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropDlq>
    Dead letter queue.
    DropInvalidJsonMessage bool
    Whether ES discards messages in non-JSON format.
    DropInvalidMessage bool
    Whether to discard messages that fail to parse. Defaults to true.
    Index string
    ES index name.
    IndexType string
    ES custom index name type: STRING or JSONPATH. Defaults to STRING.
    Password string
    MongoDB database password.
    Port double
    MongoDB connection port.
    Resource string
    Resource.
    SelfBuilt bool
    Whether it is a self-built cluster.
    ServiceVip string
    Instance VIP.
    UniqVpcId string
    Instance VPC ID.
    UserName string
    MongoDB database user name.
    ContentKey string
    Key for data in non-JSON format.
    DatabasePrimaryKey string
    When the message dumped to ES is a database binlog, fill in the primary key of the database table if you need to synchronize insert, delete, and update operations to ES.
    DateFormat string
    ES date suffix.
    DocumentIdField string
    The field name of the document ID value dumped into ES.
    DropCls []GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropCl
    When DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
    DropDlqs []GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropDlq
    Dead letter queue.
    DropInvalidJsonMessage bool
    Whether ES discards messages in non-JSON format.
    DropInvalidMessage bool
    Whether to discard messages that fail to parse. Defaults to true.
    Index string
    ES index name.
    IndexType string
    ES custom index name type: STRING or JSONPATH. Defaults to STRING.
    Password string
    MongoDB database password.
    Port float64
    MongoDB connection port.
    Resource string
    Resource.
    SelfBuilt bool
    Whether it is a self-built cluster.
    ServiceVip string
    Instance VIP.
    UniqVpcId string
    Instance VPC ID.
    UserName string
    MongoDB database user name.
    contentKey String
    Key for data in non-JSON format.
    databasePrimaryKey String
    When the message dumped to ES is a database binlog, fill in the primary key of the database table if you need to synchronize insert, delete, and update operations to ES.
    dateFormat String
    ES date suffix.
    documentIdField String
    The field name of the document ID value dumped into ES.
    dropCls List<GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropCl>
    When DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
    dropDlqs List<GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropDlq>
    Dead letter queue.
    dropInvalidJsonMessage Boolean
    Whether ES discards messages in non-JSON format.
    dropInvalidMessage Boolean
    Whether to discard messages that fail to parse. Defaults to true.
    index String
    ES index name.
    indexType String
    ES custom index name type: STRING or JSONPATH. Defaults to STRING.
    password String
    MongoDB database password.
    port Double
    MongoDB connection port.
    resource String
    Resource.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    serviceVip String
    Instance VIP.
    uniqVpcId String
    Instance VPC ID.
    userName String
    MongoDB database user name.
    contentKey string
    Key for data in non-JSON format.
    databasePrimaryKey string
    When the message dumped to ES is a database binlog, fill in the primary key of the database table if you need to synchronize insert, delete, and update operations to ES.
    dateFormat string
    ES date suffix.
    documentIdField string
    The field name of the document ID value dumped into ES.
    dropCls GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropCl[]
    When DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
    dropDlqs GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropDlq[]
    Dead letter queue.
    dropInvalidJsonMessage boolean
    Whether ES discards messages in non-JSON format.
    dropInvalidMessage boolean
    Whether to discard messages that fail to parse. Defaults to true.
    index string
    ES index name.
    indexType string
    ES custom index name type: STRING or JSONPATH. Defaults to STRING.
    password string
    MongoDB database password.
    port number
    MongoDB connection port.
    resource string
    Resource.
    selfBuilt boolean
    Whether it is a self-built cluster.
    serviceVip string
    Instance VIP.
    uniqVpcId string
    Instance VPC ID.
    userName string
    MongoDB database user name.
    content_key str
    Key for data in non-JSON format.
    database_primary_key str
    When the message dumped to ES is a database binlog, fill in the primary key of the database table if you need to synchronize insert, delete, and update operations to ES.
    date_format str
    ES date suffix.
    document_id_field str
    The field name of the document ID value dumped into ES.
    drop_cls Sequence[GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropCl]
    When DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
    drop_dlqs Sequence[GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropDlq]
    Dead letter queue.
    drop_invalid_json_message bool
    Whether ES discards messages in non-JSON format.
    drop_invalid_message bool
    Whether to discard messages that fail to parse. Defaults to true.
    index str
    ES index name.
    index_type str
    ES custom index name type: STRING or JSONPATH. Defaults to STRING.
    password str
    MongoDB database password.
    port float
    MongoDB connection port.
    resource str
    Resource.
    self_built bool
    Whether it is a self-built cluster.
    service_vip str
    Instance VIP.
    uniq_vpc_id str
    Instance VPC ID.
    user_name str
    MongoDB database user name.
    contentKey String
    Key for data in non-JSON format.
    databasePrimaryKey String
    When the message dumped to ES is a database binlog, fill in the primary key of the database table if you need to synchronize insert, delete, and update operations to ES.
    dateFormat String
    ES date suffix.
    documentIdField String
    The field name of the document ID value dumped into ES.
    dropCls List<Property Map>
    When DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
    dropDlqs List<Property Map>
    Dead letter queue.
    dropInvalidJsonMessage Boolean
    Whether ES discards messages in non-JSON format.
    dropInvalidMessage Boolean
    Whether to discard messages that fail to parse. Defaults to true.
    index String
    ES index name.
    indexType String
    ES custom index name type: STRING or JSONPATH. Defaults to STRING.
    password String
    MongoDB database password.
    port Number
    MongoDB connection port.
    resource String
    Resource.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    serviceVip String
    Instance VIP.
    uniqVpcId String
    Instance VPC ID.
    userName String
    MongoDB database user name.
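
    The interplay between DropInvalidMessage, DropInvalidMessageToCls, and the DropCls block can be summarized as a precedence rule: CLS delivery wins when enabled. A hedged TypeScript sketch of that rule; the shapes simply mirror the fields above:

    type EsDropBehavior = "deliver-to-cls" | "discard" | "keep";

    function invalidMessageSink(p: {
        dropInvalidMessage?: boolean;
        dropCls?: { dropInvalidMessageToCls?: boolean }[];
    }): EsDropBehavior {
        // When DropInvalidMessageToCls is true, DropInvalidMessage is ignored.
        const toCls = (p.dropCls ?? []).some(c => c.dropInvalidMessageToCls);
        if (toCls) {
            return "deliver-to-cls";
        }
        return p.dropInvalidMessage ? "discard" : "keep";
    }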

    GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropCl

    DropClsLogSet string
    CLS logset ID.
    DropClsOwneruin string
    Owner account (UIN) for CLS delivery.
    DropClsRegion string
    The region to which the CLS logs are delivered.
    DropClsTopicId string
    CLS topic ID.
    DropInvalidMessageToCls bool
    Whether to deliver to CLS.
    DropClsLogSet string
    CLS logset ID.
    DropClsOwneruin string
    Owner account (UIN) for CLS delivery.
    DropClsRegion string
    The region to which the CLS logs are delivered.
    DropClsTopicId string
    CLS topic ID.
    DropInvalidMessageToCls bool
    Whether to deliver to CLS.
    dropClsLogSet String
    CLS logset ID.
    dropClsOwneruin String
    Owner account (UIN) for CLS delivery.
    dropClsRegion String
    The region to which the CLS logs are delivered.
    dropClsTopicId String
    CLS topic ID.
    dropInvalidMessageToCls Boolean
    Whether to deliver to CLS.
    dropClsLogSet string
    CLS logset ID.
    dropClsOwneruin string
    Owner account (UIN) for CLS delivery.
    dropClsRegion string
    The region to which the CLS logs are delivered.
    dropClsTopicId string
    CLS topic ID.
    dropInvalidMessageToCls boolean
    Whether to deliver to CLS.
    drop_cls_log_set str
    CLS logset ID.
    drop_cls_owneruin str
    Owner account (UIN) for CLS delivery.
    drop_cls_region str
    The region to which the CLS logs are delivered.
    drop_cls_topic_id str
    CLS topic ID.
    drop_invalid_message_to_cls bool
    Whether to deliver to CLS.
    dropClsLogSet String
    CLS logset ID.
    dropClsOwneruin String
    Owner account (UIN) for CLS delivery.
    dropClsRegion String
    The region to which the CLS logs are delivered.
    dropClsTopicId String
    CLS topic ID.
    dropInvalidMessageToCls Boolean
    Whether to deliver to CLS.

    GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropDlq

    DlqType string
    DLQ type: CKAFKA or TOPIC.
    KafkaParams List<GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropDlqKafkaParam>
    CKafka configuration; required when Type is KAFKA.
    MaxRetryAttempts double
    Retry times.
    RetryInterval double
    Retry interval.
    TopicParams List<GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropDlqTopicParam>
    Topic configuration; required when Type is TOPIC.
    Type string
    Resource Type.
    DlqType string
    DLQ type: CKAFKA or TOPIC.
    KafkaParams []GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropDlqKafkaParam
    CKafka configuration; required when Type is KAFKA.
    MaxRetryAttempts float64
    Retry times.
    RetryInterval float64
    Retry interval.
    TopicParams []GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropDlqTopicParam
    Topic configuration; required when Type is TOPIC.
    Type string
    Resource Type.
    dlqType String
    DLQ type: CKAFKA or TOPIC.
    kafkaParams List<GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropDlqKafkaParam>
    CKafka configuration; required when Type is KAFKA.
    maxRetryAttempts Double
    Retry times.
    retryInterval Double
    Retry interval.
    topicParams List<GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropDlqTopicParam>
    Topic configuration; required when Type is TOPIC.
    type String
    Resource Type.
    dlqType string
    DLQ type: CKAFKA or TOPIC.
    kafkaParams GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropDlqKafkaParam[]
    CKafka configuration; required when Type is KAFKA.
    maxRetryAttempts number
    Retry times.
    retryInterval number
    Retry interval.
    topicParams GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropDlqTopicParam[]
    Topic configuration; required when Type is TOPIC.
    type string
    Resource Type.
    dlq_type str
    DLQ type: CKAFKA or TOPIC.
    kafka_params Sequence[GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropDlqKafkaParam]
    CKafka configuration; required when Type is KAFKA.
    max_retry_attempts float
    Retry times.
    retry_interval float
    Retry interval.
    topic_params Sequence[GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropDlqTopicParam]
    Topic configuration; required when Type is TOPIC.
    type str
    Resource Type.
    dlqType String
    DLQ type: CKAFKA or TOPIC.
    kafkaParams List<Property Map>
    CKafka configuration; required when Type is KAFKA.
    maxRetryAttempts Number
    Retry times.
    retryInterval Number
    Retry interval.
    topicParams List<Property Map>
    Topic configuration; required when Type is TOPIC.
    type String
    Resource Type.

    GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropDlqKafkaParam

    CompressionType string
    Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
    ConnectorSyncType string
    ConnectorSyncType.
    EnableToleration bool
    Enable dead letter queue.
    KeepPartition bool
    KeepPartition.
    MsgMultiple double
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    OffsetType string
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    PartitionNum double
    The partition number of the topic.
    QpsLimit double
    QPS (queries per second) limit.
    Resource string
    Resource.
    ResourceName string
    Instance name.
    SelfBuilt bool
    Whether it is a self-built cluster.
    StartTime double
    Required when the offset type is timestamp; pass a timestamp accurate to the second.
    TableMappings List<GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropDlqKafkaParamTableMapping>
    Maps of table to topic; required when multi-topic is selected.
    Topic string
    Topic name; separate multiple topics with commas.
    TopicId string
    Topic ID.
    UseAutoCreateTopic bool
    Whether the topic needs to be automatically created (currently only supports SOURCE inflow tasks).
    UseTableMapping bool
    Whether to use multiple tables.
    ZoneId double
    Zone ID.
    CompressionType string
    Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
    ConnectorSyncType string
    ConnectorSyncType.
    EnableToleration bool
    Enable dead letter queue.
    KeepPartition bool
    KeepPartition.
    MsgMultiple float64
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    OffsetType string
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    PartitionNum float64
    The partition number of the topic.
    QpsLimit float64
    QPS (queries per second) limit.
    Resource string
    Resource.
    ResourceName string
    Instance name.
    SelfBuilt bool
    Whether it is a self-built cluster.
    StartTime float64
    Required when the offset type is timestamp; pass a timestamp accurate to the second.
    TableMappings []GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropDlqKafkaParamTableMapping
    Maps of table to topic; required when multi-topic is selected.
    Topic string
    Topic name; separate multiple topics with commas.
    TopicId string
    Topic ID.
    UseAutoCreateTopic bool
    Whether the topic needs to be automatically created (currently only supports SOURCE inflow tasks).
    UseTableMapping bool
    Whether to use multiple tables.
    ZoneId float64
    Zone ID.
    compressionType String
    Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
    connectorSyncType String
    ConnectorSyncType.
    enableToleration Boolean
    Enable dead letter queue.
    keepPartition Boolean
    KeepPartition.
    msgMultiple Double
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType String
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    partitionNum Double
    The partition number of the topic.
    qpsLimit Double
    QPS (queries per second) limit.
    resource String
    Resource.
    resourceName String
    Instance name.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    startTime Double
    Required when the offset type is timestamp; pass a timestamp accurate to the second.
    tableMappings List<GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropDlqKafkaParamTableMapping>
    Maps of table to topic; required when multi-topic is selected.
    topic String
    Topic name; separate multiple topics with commas.
    topicId String
    Topic ID.
    useAutoCreateTopic Boolean
    Whether the topic needs to be automatically created (currently only supports SOURCE inflow tasks).
    useTableMapping Boolean
    Whether to use multiple tables.
    zoneId Double
    Zone ID.
    compressionType string
    Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
    connectorSyncType string
    ConnectorSyncType.
    enableToleration boolean
    Enable dead letter queue.
    keepPartition boolean
    KeepPartition.
    msgMultiple number
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType string
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    partitionNum number
    The partition number of the topic.
    qpsLimit number
    QPS (queries per second) limit.
    resource string
    Resource.
    resourceName string
    Instance name.
    selfBuilt boolean
    Whether it is a self-built cluster.
    startTime number
    Required when the offset type is timestamp; pass a timestamp accurate to the second.
    tableMappings GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropDlqKafkaParamTableMapping[]
    Maps of table to topic; required when multi-topic is selected.
    topic string
    Topic name; separate multiple topics with commas.
    topicId string
    Topic ID.
    useAutoCreateTopic boolean
    Whether the topic needs to be automatically created (currently only supports SOURCE inflow tasks).
    useTableMapping boolean
    Whether to use multiple tables.
    zoneId number
    Zone ID.
    compression_type str
    Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
    connector_sync_type str
    ConnectorSyncType.
    enable_toleration bool
    Enable dead letter queue.
    keep_partition bool
    KeepPartition.
    msg_multiple float
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offset_type str
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    partition_num float
    The partition number of the topic.
    qps_limit float
    QPS (queries per second) limit.
    resource str
    Resource.
    resource_name str
    Instance name.
    self_built bool
    Whether it is a self-built cluster.
    start_time float
    Required when the offset type is timestamp; pass a timestamp accurate to the second.
    table_mappings Sequence[GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropDlqKafkaParamTableMapping]
    Maps of table to topic; required when multi-topic is selected.
    topic str
    Topic name; separate multiple topics with commas.
    topic_id str
    Topic ID.
    use_auto_create_topic bool
    Whether the topic needs to be automatically created (currently only supports SOURCE inflow tasks).
    use_table_mapping bool
    Whether to use multiple tables.
    zone_id float
    Zone ID.
    compressionType String
    Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
    connectorSyncType String
    ConnectorSyncType.
    enableToleration Boolean
    Enable dead letter queue.
    keepPartition Boolean
    KeepPartition.
    msgMultiple Number
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType String
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    partitionNum Number
    The partition number of the topic.
    qpsLimit Number
    QPS (queries per second) limit.
    resource String
    Resource.
    resourceName String
    Instance name.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    startTime Number
    Required when the offset type is timestamp; pass a timestamp accurate to the second.
    tableMappings List<Property Map>
    Maps of table to topic; required when multi-topic is selected.
    topic String
    Topic name; separate multiple topics with commas.
    topicId String
    Topic ID.
    useAutoCreateTopic Boolean
    Whether the topic needs to be automatically created (currently only supports SOURCE inflow tasks).
    useTableMapping Boolean
    Whether to use multiple tables.
    zoneId Number
    Zone ID.
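
    StartTime is only meaningful when OffsetType is timestamp, where it carries a second-level timestamp. A hedged TypeScript sketch of that relationship:

    // Resolve the documented offset rule: "earliest" and "latest" stand
    // alone, while "timestamp" requires a second-level startTime.
    function resolveStartOffset(offsetType: string, startTime?: number): string {
        if (offsetType === "timestamp") {
            if (startTime === undefined) {
                throw new Error("startTime is required when offsetType is 'timestamp'");
            }
            return `timestamp:${startTime}`;
        }
        return offsetType;
    }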

    GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropDlqKafkaParamTableMapping

    Database string
    SQLServer database name.
    Table string
    SQLServer table: any non-system table in the monitored databases. Separate multiple tables with commas; each entry must be in the format database name.table name.
    Topic string
    Topic name; separate multiple topics with commas.
    TopicId string
    Topic ID.
    Database string
    SQLServer database name.
    Table string
    SQLServer table: any non-system table in the monitored databases. Separate multiple tables with commas; each entry must be in the format database name.table name.
    Topic string
    Topic name; separate multiple topics with commas.
    TopicId string
    Topic ID.
    database String
    SQLServer database name.
    table String
    SQLServer table: any non-system table in the monitored databases. Separate multiple tables with commas; each entry must be in the format database name.table name.
    topic String
    Topic name; separate multiple topics with commas.
    topicId String
    Topic ID.
    database string
    SQLServer database name.
    table string
    SQLServer table: any non-system table in the monitored databases. Separate multiple tables with commas; each entry must be in the format database name.table name.
    topic string
    Topic name; separate multiple topics with commas.
    topicId string
    Topic ID.
    database str
    SQLServer database name.
    table str
    SQLServer table: any non-system table in the monitored databases. Separate multiple tables with commas; each entry must be in the format database name.table name.
    topic str
    Topic name; separate multiple topics with commas.
    topic_id str
    Topic ID.
    database String
    SQLServer database name.
    table String
    SQLServer table: any non-system table in the monitored databases. Separate multiple tables with commas; each entry must be in the format database name.table name.
    topic String
    Topic name; separate multiple topics with commas.
    topicId String
    Topic ID.
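
    Each Table entry uses the database name.table name format, comma-separated for multiple tables, so parsing splits on commas and then on the first dot. A hedged TypeScript sketch:

    // Parse "db1.table1,db1.table2" into structured entries.
    function parseTables(table: string): { database: string; table: string }[] {
        return table
            .split(",")
            .map(e => e.trim())
            .filter(e => e.length > 0)
            .map(entry => {
                const dot = entry.indexOf(".");
                return {
                    database: entry.slice(0, dot),
                    table: entry.slice(dot + 1),
                };
            });
    }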

    GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropDlqTopicParam

    CompressionType string
    Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
    MsgMultiple double
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    OffsetType string
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    Resource string
    Resource.
    StartTime double
    Required when the offset type is timestamp; pass a timestamp accurate to the second.
    TopicId string
    Topic ID.
    UseAutoCreateTopic bool
    Whether the topic needs to be automatically created (currently only supports SOURCE inflow tasks).
    CompressionType string
    Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
    MsgMultiple float64
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    OffsetType string
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    Resource string
    Resource.
    StartTime float64
    Required when the offset type is timestamp; pass a timestamp accurate to the second.
    TopicId string
    Topic ID.
    UseAutoCreateTopic bool
    Whether the topic needs to be automatically created (currently only supports SOURCE inflow tasks).
    compressionType String
    Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
    msgMultiple Double
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType String
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    resource String
    Resource.
    startTime Double
    Required when the offset type is timestamp; pass a timestamp accurate to the second.
    topicId String
    Topic ID.
    useAutoCreateTopic Boolean
    Whether the topic needs to be automatically created (currently only supports SOURCE inflow tasks).
    compressionType string
    Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
    msgMultiple number
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType string
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    resource string
    Resource.
    startTime number
    Required when the offset type is timestamp; pass a timestamp accurate to the second.
    topicId string
    Topic ID.
    useAutoCreateTopic boolean
    Whether the topic needs to be automatically created (currently only supports SOURCE inflow tasks).
    compression_type str
    Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
    msg_multiple float
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offset_type str
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    resource str
    Resource.
    start_time float
    Required when the offset type is timestamp; pass a timestamp accurate to the second.
    topic_id str
    Topic ID.
    use_auto_create_topic bool
    Whether the topic needs to be automatically created (currently only supports SOURCE inflow tasks).
    compressionType String
    Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
    msgMultiple Number
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType String
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    resource String
    Resource.
    startTime Number
    Required when the offset type is timestamp; pass a timestamp accurate to the second.
    topicId String
    Topic ID.
    useAutoCreateTopic Boolean
    Whether the topic needs to be automatically created (currently only supports SOURCE inflow tasks).

    GetCkafkaDatahubTaskTaskListTargetResourceEventBusParam

    FunctionName string
    SCF function name.
    Namespace string
    SCF cloud function namespace, the default is default.
    Qualifier string
    SCF cloud function version and alias, the default is DEFAULT.
    Resource string
    Resource.
    SelfBuilt bool
    Whether it is a self-built cluster.
    Type string
    Resource Type.
    FunctionName string
    SCF function name.
    Namespace string
    SCF cloud function namespace, the default is default.
    Qualifier string
    SCF cloud function version and alias, the default is DEFAULT.
    Resource string
    Resource.
    SelfBuilt bool
    Whether it is a self-built cluster.
    Type string
    Resource Type.
    functionName String
    SCF function name.
    namespace String
    SCF cloud function namespace, the default is default.
    qualifier String
    SCF cloud function version and alias, the default is DEFAULT.
    resource String
    Resource.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    type String
    Resource Type.
    functionName string
    SCF function name.
    namespace string
    SCF cloud function namespace, the default is default.
    qualifier string
    SCF cloud function version and alias, the default is DEFAULT.
    resource string
    Resource.
    selfBuilt boolean
    Whether it is a self-built cluster.
    type string
    Resource Type.
    function_name str
    SCF function name.
    namespace str
    SCF cloud function namespace, the default is default.
    qualifier str
    SCF cloud function version and alias, the default is DEFAULT.
    resource str
    Resource.
    self_built bool
    Whether it is a self-built cluster.
    type str
    Resource Type.
    functionName String
    SCF function name.
    namespace String
    SCF cloud function namespace, the default is default.
    qualifier String
    SCF cloud function version and alias, the default is DEFAULT.
    resource String
    Resource.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    type String
    Resource Type.

    GetCkafkaDatahubTaskTaskListTargetResourceKafkaParam

    CompressionType string
    Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
    ConnectorSyncType string
    ConnectorSyncType.
    EnableToleration bool
    Enable dead letter queue.
    KeepPartition bool
    KeepPartition.
    MsgMultiple double
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    OffsetType string
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    PartitionNum double
    The partition number of the topic.
    QpsLimit double
    QPS (queries per second) limit.
    Resource string
    Resource.
    ResourceName string
    Instance name.
    SelfBuilt bool
    Whether it is a self-built cluster.
    StartTime double
    Required when the offset type is timestamp; pass a timestamp accurate to the second.
    TableMappings List<GetCkafkaDatahubTaskTaskListTargetResourceKafkaParamTableMapping>
    Maps of table to topic; required when multi-topic is selected.
    Topic string
    Topic name; separate multiple topics with commas.
    TopicId string
    Topic ID.
    UseAutoCreateTopic bool
    Whether the topic needs to be automatically created (currently only supports SOURCE inflow tasks).
    UseTableMapping bool
    Whether to use multiple tables.
    ZoneId double
    Zone ID.
    CompressionType string
    Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
    ConnectorSyncType string
    ConnectorSyncType.
    EnableToleration bool
    Enable dead letter queue.
    KeepPartition bool
    KeepPartition.
    MsgMultiple float64
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    OffsetType string
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    PartitionNum float64
    The partition number of the topic.
    QpsLimit float64
    QPS (queries per second) limit.
    Resource string
    Resource.
    ResourceName string
    Instance name.
    SelfBuilt bool
    Whether it is a self-built cluster.
    StartTime float64
    Required when the offset type is timestamp; pass a timestamp accurate to the second.
    TableMappings []GetCkafkaDatahubTaskTaskListTargetResourceKafkaParamTableMapping
    Maps of table to topic; required when multi-topic is selected.
    Topic string
    Topic name; separate multiple topics with commas.
    TopicId string
    Topic ID.
    UseAutoCreateTopic bool
    Whether the topic needs to be automatically created (currently only supports SOURCE inflow tasks).
    UseTableMapping bool
    Whether to use multiple tables.
    ZoneId float64
    Zone ID.
    compressionType String
    Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
    connectorSyncType String
    ConnectorSyncType.
    enableToleration Boolean
    Enable dead letter queue.
    keepPartition Boolean
    KeepPartition.
    msgMultiple Double
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType String
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    partitionNum Double
    The partition number of the topic.
    qpsLimit Double
    QPS (queries per second) limit.
    resource String
    Resource.
    resourceName String
    Instance name.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    startTime Double
    Required when the offset type is timestamp; pass a timestamp accurate to the second.
    tableMappings List<GetCkafkaDatahubTaskTaskListTargetResourceKafkaParamTableMapping>
    Maps of table to topic; required when multi-topic is selected.
    topic String
    Topic name; separate multiple topics with commas.
    topicId String
    Topic ID.
    useAutoCreateTopic Boolean
    Whether the topic needs to be automatically created (currently only supports SOURCE inflow tasks).
    useTableMapping Boolean
    Whether to use multiple tables.
    zoneId Double
    Zone ID.
    compressionType string
    Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
    connectorSyncType string
    ConnectorSyncType.
    enableToleration boolean
    Enable dead letter queue.
    keepPartition boolean
    KeepPartition.
    msgMultiple number
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType string
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    partitionNum number
    The partition number of the topic.
    qpsLimit number
    QPS (queries per second) limit.
    resource string
    Resource.
    resourceName string
    Instance name.
    selfBuilt boolean
    Whether it is a self-built cluster.
    startTime number
    Required when the offset type is timestamp; pass a timestamp accurate to the second.
    tableMappings GetCkafkaDatahubTaskTaskListTargetResourceKafkaParamTableMapping[]
    Maps of table to topic; required when multi-topic is selected.
    topic string
    Topic name; separate multiple topics with commas.
    topicId string
    Topic ID.
    useAutoCreateTopic boolean
    Whether the topic needs to be automatically created (currently only supports SOURCE inflow tasks).
    useTableMapping boolean
    Whether to use multiple tables.
    zoneId number
    Zone ID.
    compression_type str
    Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
    connector_sync_type str
    ConnectorSyncType.
    enable_toleration bool
    Enable dead letter queue.
    keep_partition bool
    KeepPartition.
    msg_multiple float
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offset_type str
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    partition_num float
    The partition number of the topic.
    qps_limit float
    QPS (queries per second) limit.
    resource str
    Resource.
    resource_name str
    Instance name.
    self_built bool
    Whether it is a self-built cluster.
    start_time float
    Required when the offset type is timestamp; pass a timestamp accurate to the second.
    table_mappings Sequence[GetCkafkaDatahubTaskTaskListTargetResourceKafkaParamTableMapping]
    Maps of table to topic; required when multi-topic is selected.
    topic str
    Topic name; separate multiple topics with commas.
    topic_id str
    Topic ID.
    use_auto_create_topic bool
    Whether the topic needs to be automatically created (currently only supports SOURCE inflow tasks).
    use_table_mapping bool
    Whether to use multiple tables.
    zone_id float
    Zone ID.
    compressionType String
    Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
    connectorSyncType String
    ConnectorSyncType.
    enableToleration Boolean
    Enable dead letter queue.
    keepPartition Boolean
    KeepPartition.
    msgMultiple Number
    Each source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka tasks).
    offsetType String
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    partitionNum Number
    The partition number of the topic.
    qpsLimit Number
    QPS (queries per second) limit.
    resource String
    Resource.
    resourceName String
    Instance name.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    startTime Number
    Required when the offset type is timestamp; pass a timestamp accurate to the second.
    tableMappings List<Property Map>
    Maps of table to topic; required when multi-topic is selected.
    topic String
    Topic name; separate multiple topics with commas.
    topicId String
    Topic ID.
    useAutoCreateTopic Boolean
    Whether the topic needs to be automatically created (currently only supports SOURCE inflow tasks).
    useTableMapping Boolean
    Whether to use multiple tables.
    zoneId Number
    Zone ID.

    GetCkafkaDatahubTaskTaskListTargetResourceKafkaParamTableMapping

    Database string
    SQLServer database name.
    Table string
    SQLServer table: any non-system table in the monitored databases. Separate multiple tables with commas; each entry must be in the format database name.table name.
    Topic string
    Topic name; separate multiple topics with commas.
    TopicId string
    Topic ID.
    Database string
    SQLServer database name.
    Table string
    SQLServer table: any non-system table in the monitored databases. Separate multiple tables with commas; each entry must be in the format database name.table name.
    Topic string
    Topic name; separate multiple topics with commas.
    TopicId string
    Topic ID.
    database String
    SQLServer database name.
    table String
    SQLServer table: any non-system table in the monitored databases. Separate multiple tables with commas; each entry must be in the format database name.table name.
    topic String
    Topic name; separate multiple topics with commas.
    topicId String
    Topic ID.
    database string
    SQLServer database name.
    table string
    SQLServer table: any non-system table in the monitored databases. Separate multiple tables with commas; each entry must be in the format database name.table name.
    topic string
    Topic name; separate multiple topics with commas.
    topicId string
    Topic ID.
    database str
    SQLServer database name.
    table str
    SQLServer table: any non-system table in the monitored databases. Separate multiple tables with commas; each entry must be in the format database name.table name.
    topic str
    Topic name; separate multiple topics with commas.
    topic_id str
    Topic ID.
    database String
    SQLServer database name.
    table String
    SQLServer table: any non-system table in the monitored databases. Separate multiple tables with commas; each entry must be in the format database name.table name.
    topic String
    Topic name; separate multiple topics with commas.
    topicId String
    Topic ID.

    GetCkafkaDatahubTaskTaskListTargetResourceMariaDbParam

    Database string
    SQLServer database name.
    IncludeContentChanges string
    If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
    IncludeQuery bool
    If the value is true, and the value of the binlog rows query log events configuration item in My SQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain Original SQL statement.
    IsTablePrefix bool
    When the Table input is a prefix, the value of this item is true, otherwise it is false.
    KeyColumns string
    Format library1.table1:field 1,field2;library2.table2:field2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
    OutputFormat string
    output format, DEFAULT, CANAL_1, CANAL_2.
    RecordWithSchema bool
    If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
    Resource string
    Resource.
    SnapshotMode string
    schema_only|initial default initial.
    Table string
    SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
    Database string
    SQLServer database name.
    IncludeContentChanges string
    If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
    IncludeQuery bool
    If the value is true, and the value of the binlog rows query log events configuration item in My SQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain Original SQL statement.
    IsTablePrefix bool
    When the Table input is a prefix, the value of this item is true, otherwise it is false.
    KeyColumns string
    Format library1.table1:field 1,field2;library2.table2:field2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
    OutputFormat string
    output format, DEFAULT, CANAL_1, CANAL_2.
    RecordWithSchema bool
    If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
    Resource string
    Resource.
    SnapshotMode string
    schema_only|initial default initial.
    Table string
    SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
    database String
    SQLServer database name.
    includeContentChanges String
    If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
    includeQuery Boolean
    If the value is true, and the value of the binlog rows query log events configuration item in My SQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain Original SQL statement.
    isTablePrefix Boolean
    When the Table input is a prefix, the value of this item is true, otherwise it is false.
    keyColumns String
    Format library1.table1:field 1,field2;library2.table2:field2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
    outputFormat String
    output format, DEFAULT, CANAL_1, CANAL_2.
    recordWithSchema Boolean
    If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
    resource String
    Resource.
    snapshotMode String
    schema_only|initial default initial.
    table String
    SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
    database string
    SQLServer database name.
    includeContentChanges string
    If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
    includeQuery boolean
    If the value is true, and the value of the binlog rows query log events configuration item in My SQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain Original SQL statement.
    isTablePrefix boolean
    When the Table input is a prefix, the value of this item is true, otherwise it is false.
    keyColumns string
    Format library1.table1:field 1,field2;library2.table2:field2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
    outputFormat string
    output format, DEFAULT, CANAL_1, CANAL_2.
    recordWithSchema boolean
    If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
    resource string
    Resource.
    snapshotMode string
    schema_only|initial default initial.
    table string
    SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
    database str
    SQLServer database name.
    include_content_changes str
    If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written to the selected topic.
    include_query bool
    If the value is true and the binlog_rows_query_log_events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
    is_table_prefix bool
    When the Table input is a prefix, the value of this item is true; otherwise it is false.
    key_columns str
    Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
    output_format str
    Output format: DEFAULT, CANAL_1, or CANAL_2.
    record_with_schema bool
    If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
    resource str
    Resource.
    snapshot_mode str
    Snapshot mode: schema_only or initial; defaults to initial.
    table str
    SQLServer table; a non-system table in any of the monitored databases. Use , (comma) to monitor multiple tables; each table must be written as database name.table name.
    database String
    SQLServer database name.
    includeContentChanges String
    If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written to the selected topic.
    includeQuery Boolean
    If the value is true and the binlog_rows_query_log_events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
    isTablePrefix Boolean
    When the Table input is a prefix, the value of this item is true; otherwise it is false.
    keyColumns String
    Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
    outputFormat String
    Output format: DEFAULT, CANAL_1, or CANAL_2.
    recordWithSchema Boolean
    If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
    resource String
    Resource.
    snapshotMode String
    Snapshot mode: schema_only or initial; defaults to initial.
    table String
    SQLServer table; a non-system table in any of the monitored databases. Use , (comma) to monitor multiple tables; each table must be written as database name.table name.
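    The KeyColumns format above is terse, so a short illustrative sketch may help. The helper below is not part of the provider SDK; it simply assembles a value in the documented library.table:field1,field2;... format.

    // Illustrative helper (not from the provider SDK): builds a KeyColumns
    // string with ; (semicolon) between tables and , (comma) between fields.
    function buildKeyColumns(tables: Record<string, string[]>): string {
        return Object.entries(tables)
            .map(([table, fields]) => `${table}:${fields.join(",")}`)
            .join(";");
    }

    const keyColumns = buildKeyColumns({
        "library1.table1": ["field1", "field2"],
        "library2.table2": ["field2"],
    });
    // => "library1.table1:field1,field2;library2.table2:field2"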

    GetCkafkaDatahubTaskTaskListTargetResourceMongoDbParam

    Collection string
    MongoDB collection.
    CopyExisting bool
    Whether to copy the existing (stock) data; defaults to true.
    Database string
    MongoDB database name.
    Ip string
    MongoDB connection IP.
    ListeningEvent string
    Event types to listen for; if empty, all types are selected. Valid values: insert, update, replace, delete, invalidate, drop, dropdatabase, rename; separate multiple types with commas.
    Password string
    MongoDB database password.
    Pipeline string
    Aggregation pipeline.
    Port double
    MongoDB connection port.
    ReadPreference string
    Primary/secondary read preference; defaults to the primary node.
    Resource string
    Resource.
    SelfBuilt bool
    Whether it is a self-built cluster.
    UserName string
    MongoDB database user name.
    Collection string
    MongoDB collection.
    CopyExisting bool
    Whether to copy the existing (stock) data; defaults to true.
    Database string
    MongoDB database name.
    Ip string
    MongoDB connection IP.
    ListeningEvent string
    Event types to listen for; if empty, all types are selected. Valid values: insert, update, replace, delete, invalidate, drop, dropdatabase, rename; separate multiple types with commas.
    Password string
    MongoDB database password.
    Pipeline string
    Aggregation pipeline.
    Port float64
    MongoDB connection port.
    ReadPreference string
    Primary/secondary read preference; defaults to the primary node.
    Resource string
    Resource.
    SelfBuilt bool
    Whether it is a self-built cluster.
    UserName string
    MongoDB database user name.
    collection String
    MongoDB collection.
    copyExisting Boolean
    Whether to copy the existing (stock) data; defaults to true.
    database String
    MongoDB database name.
    ip String
    MongoDB connection IP.
    listeningEvent String
    Event types to listen for; if empty, all types are selected. Valid values: insert, update, replace, delete, invalidate, drop, dropdatabase, rename; separate multiple types with commas.
    password String
    MongoDB database password.
    pipeline String
    Aggregation pipeline.
    port Double
    MongoDB connection port.
    readPreference String
    Primary/secondary read preference; defaults to the primary node.
    resource String
    Resource.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    userName String
    MongoDB database user name.
    collection string
    MongoDB collection.
    copyExisting boolean
    Whether to copy the existing (stock) data; defaults to true.
    database string
    MongoDB database name.
    ip string
    MongoDB connection IP.
    listeningEvent string
    Event types to listen for; if empty, all types are selected. Valid values: insert, update, replace, delete, invalidate, drop, dropdatabase, rename; separate multiple types with commas.
    password string
    MongoDB database password.
    pipeline string
    Aggregation pipeline.
    port number
    MongoDB connection port.
    readPreference string
    Primary/secondary read preference; defaults to the primary node.
    resource string
    Resource.
    selfBuilt boolean
    Whether it is a self-built cluster.
    userName string
    MongoDB database user name.
    collection str
    MongoDB collection.
    copy_existing bool
    Whether to copy the existing (stock) data; defaults to true.
    database str
    MongoDB database name.
    ip str
    MongoDB connection IP.
    listening_event str
    Event types to listen for; if empty, all types are selected. Valid values: insert, update, replace, delete, invalidate, drop, dropdatabase, rename; separate multiple types with commas.
    password str
    MongoDB database password.
    pipeline str
    Aggregation pipeline.
    port float
    MongoDB connection port.
    read_preference str
    Primary/secondary read preference; defaults to the primary node.
    resource str
    Resource.
    self_built bool
    Whether it is a self-built cluster.
    user_name str
    MongoDB database user name.
    collection String
    MongoDB collection.
    copyExisting Boolean
    Whether to copy the existing (stock) data; defaults to true.
    database String
    MongoDB database name.
    ip String
    MongoDB connection IP.
    listeningEvent String
    Event types to listen for; if empty, all types are selected. Valid values: insert, update, replace, delete, invalidate, drop, dropdatabase, rename; separate multiple types with commas.
    password String
    MongoDB database password.
    pipeline String
    Aggregation pipeline.
    port Number
    MongoDB connection port.
    readPreference String
    Primary/secondary read preference; defaults to the primary node.
    resource String
    Resource.
    selfBuilt Boolean
    Whether it is a self-built cluster.
    userName String
    MongoDB database user name.
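    For orientation, here is how a MongoDB parameter block might look as a plain TypeScript object. The field names mirror the documented camelCase properties above; every value is a made-up placeholder, not output from a real task.

    // Hypothetical values only; field names follow the documented properties.
    const mongoDbParam = {
        database: "appdb",                      // MongoDB database name
        collection: "orders",                   // MongoDB collection
        ip: "10.0.0.12",                        // connection IP (placeholder)
        port: 27017,                            // connection port
        userName: "ckafka_reader",
        password: "********",
        listeningEvent: "insert,update,delete", // comma-separated event types
        readPreference: "primary",              // defaults to the primary node
        pipeline: "[]",                         // aggregation pipeline
        copyExisting: true,                     // copy stock data (default true)
        selfBuilt: false,                       // not a self-built cluster
        resource: "mongodb-xxxxxxxx",           // placeholder resource ID
    };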

    GetCkafkaDatahubTaskTaskListTargetResourceMySqlParam

    DataSourceIncrementColumn string
    The name of the column to be monitored.
    DataSourceIncrementMode string
    TIMESTAMP indicates that the incremental column is a timestamp; INCREMENT indicates that it is an auto-incrementing ID.
    DataSourceMonitorMode string
    TABLE indicates that the read item is a table; QUERY indicates that it is a query.
    DataSourceMonitorResource string
    When DataMonitorMode=TABLE, pass in the table to be read; when DataMonitorMode=QUERY, pass in the SQL query statement to be read.
    DataSourceStartFrom string
    HEAD means copy stock plus incremental data; TAIL means copy only incremental data.
    DataTargetInsertMode string
    INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
    DataTargetPrimaryKeyField string
    When DataInsertMode=UPSERT, pass in the primary key that the upsert depends on.
    DataTargetRecordMappings List<GetCkafkaDatahubTaskTaskListTargetResourceMySqlParamDataTargetRecordMapping>
    Mapping relationship between tables and messages.
    Database string
    MySQL database name.
    DdlTopic string
    The topic that stores MySQL DDL information; if empty, DDL information is not stored by default.
    DropCls List<GetCkafkaDatahubTaskTaskListTargetResourceMySqlParamDropCl>
    When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
    DropInvalidMessage bool
    Whether to discard messages that fail to parse; defaults to true.
    IncludeContentChanges string
    If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written to the selected topic.
    IncludeQuery bool
    If the value is true and the binlog_rows_query_log_events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
    IsTablePrefix bool
    When the Table input is a prefix, the value of this item is true; otherwise it is false.
    IsTableRegular bool
    Whether the input table is a regular expression.
    KeyColumns string
    Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
    OutputFormat string
    Output format: DEFAULT, CANAL_1, or CANAL_2.
    RecordWithSchema bool
    If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
    Resource string
    Resource.
    SignalDatabase string
    Database name of the signal table.
    SnapshotMode string
    Snapshot mode: schema_only or initial; defaults to initial.
    Table string
    MySQL table; a non-system table in any of the monitored databases. Use , (comma) to monitor multiple tables; each table must be written as database name.table name.
    TopicRegex string
    Regular expression for routing events to specific topics; defaults to (.*).
    TopicReplacement string
    Replacement string used together with TopicRegex; supports capture references such as $1 and $2.
    DataSourceIncrementColumn string
    The name of the column to be monitored.
    DataSourceIncrementMode string
    TIMESTAMP indicates that the incremental column is a timestamp; INCREMENT indicates that it is an auto-incrementing ID.
    DataSourceMonitorMode string
    TABLE indicates that the read item is a table; QUERY indicates that it is a query.
    DataSourceMonitorResource string
    When DataMonitorMode=TABLE, pass in the table to be read; when DataMonitorMode=QUERY, pass in the SQL query statement to be read.
    DataSourceStartFrom string
    HEAD means copy stock plus incremental data; TAIL means copy only incremental data.
    DataTargetInsertMode string
    INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
    DataTargetPrimaryKeyField string
    When DataInsertMode=UPSERT, pass in the primary key that the upsert depends on.
    DataTargetRecordMappings []GetCkafkaDatahubTaskTaskListTargetResourceMySqlParamDataTargetRecordMapping
    Mapping relationship between tables and messages.
    Database string
    MySQL database name.
    DdlTopic string
    The topic that stores MySQL DDL information; if empty, DDL information is not stored by default.
    DropCls []GetCkafkaDatahubTaskTaskListTargetResourceMySqlParamDropCl
    When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
    DropInvalidMessage bool
    Whether to discard messages that fail to parse; defaults to true.
    IncludeContentChanges string
    If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written to the selected topic.
    IncludeQuery bool
    If the value is true and the binlog_rows_query_log_events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
    IsTablePrefix bool
    When the Table input is a prefix, the value of this item is true; otherwise it is false.
    IsTableRegular bool
    Whether the input table is a regular expression.
    KeyColumns string
    Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
    OutputFormat string
    Output format: DEFAULT, CANAL_1, or CANAL_2.
    RecordWithSchema bool
    If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
    Resource string
    Resource.
    SignalDatabase string
    Database name of the signal table.
    SnapshotMode string
    Snapshot mode: schema_only or initial; defaults to initial.
    Table string
    MySQL table; a non-system table in any of the monitored databases. Use , (comma) to monitor multiple tables; each table must be written as database name.table name.
    TopicRegex string
    Regular expression for routing events to specific topics; defaults to (.*).
    TopicReplacement string
    Replacement string used together with TopicRegex; supports capture references such as $1 and $2.
    dataSourceIncrementColumn String
    The name of the column to be monitored.
    dataSourceIncrementMode String
    TIMESTAMP indicates that the incremental column is a timestamp; INCREMENT indicates that it is an auto-incrementing ID.
    dataSourceMonitorMode String
    TABLE indicates that the read item is a table; QUERY indicates that it is a query.
    dataSourceMonitorResource String
    When DataMonitorMode=TABLE, pass in the table to be read; when DataMonitorMode=QUERY, pass in the SQL query statement to be read.
    dataSourceStartFrom String
    HEAD means copy stock plus incremental data; TAIL means copy only incremental data.
    dataTargetInsertMode String
    INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
    dataTargetPrimaryKeyField String
    When DataInsertMode=UPSERT, pass in the primary key that the upsert depends on.
    dataTargetRecordMappings List<GetCkafkaDatahubTaskTaskListTargetResourceMySqlParamDataTargetRecordMapping>
    Mapping relationship between tables and messages.
    database String
    MySQL database name.
    ddlTopic String
    The topic that stores MySQL DDL information; if empty, DDL information is not stored by default.
    dropCls List<GetCkafkaDatahubTaskTaskListTargetResourceMySqlParamDropCl>
    When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
    dropInvalidMessage Boolean
    Whether to discard messages that fail to parse; defaults to true.
    includeContentChanges String
    If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written to the selected topic.
    includeQuery Boolean
    If the value is true and the binlog_rows_query_log_events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
    isTablePrefix Boolean
    When the Table input is a prefix, the value of this item is true; otherwise it is false.
    isTableRegular Boolean
    Whether the input table is a regular expression.
    keyColumns String
    Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
    outputFormat String
    Output format: DEFAULT, CANAL_1, or CANAL_2.
    recordWithSchema Boolean
    If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
    resource String
    Resource.
    signalDatabase String
    Database name of the signal table.
    snapshotMode String
    Snapshot mode: schema_only or initial; defaults to initial.
    table String
    MySQL table; a non-system table in any of the monitored databases. Use , (comma) to monitor multiple tables; each table must be written as database name.table name.
    topicRegex String
    Regular expression for routing events to specific topics; defaults to (.*).
    topicReplacement String
    Replacement string used together with TopicRegex; supports capture references such as $1 and $2.
    dataSourceIncrementColumn string
    The name of the column to be monitored.
    dataSourceIncrementMode string
    TIMESTAMP indicates that the incremental column is a timestamp; INCREMENT indicates that it is an auto-incrementing ID.
    dataSourceMonitorMode string
    TABLE indicates that the read item is a table; QUERY indicates that it is a query.
    dataSourceMonitorResource string
    When DataMonitorMode=TABLE, pass in the table to be read; when DataMonitorMode=QUERY, pass in the SQL query statement to be read.
    dataSourceStartFrom string
    HEAD means copy stock plus incremental data; TAIL means copy only incremental data.
    dataTargetInsertMode string
    INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
    dataTargetPrimaryKeyField string
    When DataInsertMode=UPSERT, pass in the primary key that the upsert depends on.
    dataTargetRecordMappings GetCkafkaDatahubTaskTaskListTargetResourceMySqlParamDataTargetRecordMapping[]
    Mapping relationship between tables and messages.
    database string
    MySQL database name.
    ddlTopic string
    The topic that stores MySQL DDL information; if empty, DDL information is not stored by default.
    dropCls GetCkafkaDatahubTaskTaskListTargetResourceMySqlParamDropCl[]
    When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
    dropInvalidMessage boolean
    Whether to discard messages that fail to parse; defaults to true.
    includeContentChanges string
    If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written to the selected topic.
    includeQuery boolean
    If the value is true and the binlog_rows_query_log_events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
    isTablePrefix boolean
    When the Table input is a prefix, the value of this item is true; otherwise it is false.
    isTableRegular boolean
    Whether the input table is a regular expression.
    keyColumns string
    Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
    outputFormat string
    Output format: DEFAULT, CANAL_1, or CANAL_2.
    recordWithSchema boolean
    If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
    resource string
    Resource.
    signalDatabase string
    Database name of the signal table.
    snapshotMode string
    Snapshot mode: schema_only or initial; defaults to initial.
    table string
    MySQL table; a non-system table in any of the monitored databases. Use , (comma) to monitor multiple tables; each table must be written as database name.table name.
    topicRegex string
    Regular expression for routing events to specific topics; defaults to (.*).
    topicReplacement string
    Replacement string used together with TopicRegex; supports capture references such as $1 and $2.
    data_source_increment_column str
    The name of the column to be monitored.
    data_source_increment_mode str
    TIMESTAMP indicates that the incremental column is a timestamp; INCREMENT indicates that it is an auto-incrementing ID.
    data_source_monitor_mode str
    TABLE indicates that the read item is a table; QUERY indicates that it is a query.
    data_source_monitor_resource str
    When DataMonitorMode=TABLE, pass in the table to be read; when DataMonitorMode=QUERY, pass in the SQL query statement to be read.
    data_source_start_from str
    HEAD means copy stock plus incremental data; TAIL means copy only incremental data.
    data_target_insert_mode str
    INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
    data_target_primary_key_field str
    When DataInsertMode=UPSERT, pass in the primary key that the upsert depends on.
    data_target_record_mappings Sequence[GetCkafkaDatahubTaskTaskListTargetResourceMySqlParamDataTargetRecordMapping]
    Mapping relationship between tables and messages.
    database str
    MySQL database name.
    ddl_topic str
    The topic that stores MySQL DDL information; if empty, DDL information is not stored by default.
    drop_cls Sequence[GetCkafkaDatahubTaskTaskListTargetResourceMySqlParamDropCl]
    When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
    drop_invalid_message bool
    Whether to discard messages that fail to parse; defaults to true.
    include_content_changes str
    If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written to the selected topic.
    include_query bool
    If the value is true and the binlog_rows_query_log_events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
    is_table_prefix bool
    When the Table input is a prefix, the value of this item is true; otherwise it is false.
    is_table_regular bool
    Whether the input table is a regular expression.
    key_columns str
    Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
    output_format str
    Output format: DEFAULT, CANAL_1, or CANAL_2.
    record_with_schema bool
    If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
    resource str
    Resource.
    signal_database str
    Database name of the signal table.
    snapshot_mode str
    Snapshot mode: schema_only or initial; defaults to initial.
    table str
    MySQL table; a non-system table in any of the monitored databases. Use , (comma) to monitor multiple tables; each table must be written as database name.table name.
    topic_regex str
    Regular expression for routing events to specific topics; defaults to (.*).
    topic_replacement str
    Replacement string used together with TopicRegex; supports capture references such as $1 and $2.
    dataSourceIncrementColumn String
    The name of the column to be monitored.
    dataSourceIncrementMode String
    TIMESTAMP indicates that the incremental column is a timestamp; INCREMENT indicates that it is an auto-incrementing ID.
    dataSourceMonitorMode String
    TABLE indicates that the read item is a table; QUERY indicates that it is a query.
    dataSourceMonitorResource String
    When DataMonitorMode=TABLE, pass in the table to be read; when DataMonitorMode=QUERY, pass in the SQL query statement to be read.
    dataSourceStartFrom String
    HEAD means copy stock plus incremental data; TAIL means copy only incremental data.
    dataTargetInsertMode String
    INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
    dataTargetPrimaryKeyField String
    When DataInsertMode=UPSERT, pass in the primary key that the upsert depends on.
    dataTargetRecordMappings List<Property Map>
    Mapping relationship between tables and messages.
    database String
    MySQL database name.
    ddlTopic String
    The topic that stores MySQL DDL information; if empty, DDL information is not stored by default.
    dropCls List<Property Map>
    When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
    dropInvalidMessage Boolean
    Whether to discard messages that fail to parse; defaults to true.
    includeContentChanges String
    If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written to the selected topic.
    includeQuery Boolean
    If the value is true and the binlog_rows_query_log_events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
    isTablePrefix Boolean
    When the Table input is a prefix, the value of this item is true; otherwise it is false.
    isTableRegular Boolean
    Whether the input table is a regular expression.
    keyColumns String
    Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
    outputFormat String
    Output format: DEFAULT, CANAL_1, or CANAL_2.
    recordWithSchema Boolean
    If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
    resource String
    Resource.
    signalDatabase String
    Database name of the signal table.
    snapshotMode String
    Snapshot mode: schema_only or initial; defaults to initial.
    table String
    MySQL table; a non-system table in any of the monitored databases. Use , (comma) to monitor multiple tables; each table must be written as database name.table name.
    topicRegex String
    Regular expression for routing events to specific topics; defaults to (.*).
    topicReplacement String
    Replacement string used together with TopicRegex; supports capture references such as $1 and $2.
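    The TopicRegex/TopicReplacement pair is easiest to see with a concrete routing example. The sketch below assumes standard regex capture-group substitution ($1, $2), which is how Debezium-style topic routing usually works; treat the exact substitution semantics as an assumption, not provider-confirmed behavior.

    // Assumed semantics: TopicRegex matches the fully qualified table name and
    // TopicReplacement may reference capture groups such as $1.
    const topicRegex = /^mydb\.(.*)$/;
    const topicReplacement = "cdc-$1";

    const routedTopic = "mydb.orders".replace(topicRegex, topicReplacement);
    console.log(routedTopic); // "cdc-orders"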

    GetCkafkaDatahubTaskTaskListTargetResourceMySqlParamDataTargetRecordMapping

    AllowNull bool
    Whether the message is allowed to be empty.
    AutoIncrement bool
    Whether it is an auto-increment column.
    ColumnName string
    Column name.
    ColumnSize string
    Current column size.
    DecimalDigits string
    Current column decimal digits.
    DefaultValue string
    Database table default parameters.
    ExtraInfo string
    Database table extra fields.
    JsonKey string
    The key name of the message.
    Type string
    Resource Type.
    AllowNull bool
    Whether the message is allowed to be empty.
    AutoIncrement bool
    Whether it is an auto-increment column.
    ColumnName string
    Column name.
    ColumnSize string
    Current column size.
    DecimalDigits string
    Current column decimal digits.
    DefaultValue string
    Database table default parameters.
    ExtraInfo string
    Database table extra fields.
    JsonKey string
    The key name of the message.
    Type string
    Resource Type.
    allowNull Boolean
    Whether the message is allowed to be empty.
    autoIncrement Boolean
    Whether it is an auto-increment column.
    columnName String
    Column name.
    columnSize String
    Current column size.
    decimalDigits String
    Current column decimal digits.
    defaultValue String
    Database table default parameters.
    extraInfo String
    Database table extra fields.
    jsonKey String
    The key name of the message.
    type String
    Resource Type.
    allowNull boolean
    Whether the message is allowed to be empty.
    autoIncrement boolean
    Whether it is an auto-increment column.
    columnName string
    Column name.
    columnSize string
    Current column size.
    decimalDigits string
    Current column decimal digits.
    defaultValue string
    Database table default parameters.
    extraInfo string
    Database table extra fields.
    jsonKey string
    The key name of the message.
    type string
    Resource Type.
    allow_null bool
    Whether the message is allowed to be empty.
    auto_increment bool
    Whether it is an auto-increment column.
    column_name str
    Column name.
    column_size str
    Current column size.
    decimal_digits str
    Current column decimal digits.
    default_value str
    Database table default parameters.
    extra_info str
    Database table extra fields.
    json_key str
    The key name of the message.
    type str
    Resource Type.
    allowNull Boolean
    Whether the message is allowed to be empty.
    autoIncrement Boolean
    Whether it is an auto-increment column.
    columnName String
    Column name.
    columnSize String
    Current column size.
    decimalDigits String
    Current column decimal digits.
    defaultValue String
    Database table default parameters.
    extraInfo String
    Database table extra fields.
    jsonKey String
    The key name of the message.
    type String
    Resource Type.
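    A single record mapping ties one message key to one table column. The object below is a hypothetical example using the documented camelCase field names; none of the values come from a real task.

    // Hypothetical mapping of the message key "order_id" onto an
    // auto-increment integer column; all values are placeholders.
    const recordMapping = {
        jsonKey: "order_id",  // key name in the message
        columnName: "id",     // target column name
        type: "int",          // column type (placeholder)
        columnSize: "11",
        decimalDigits: "0",
        allowNull: false,
        autoIncrement: true,
        defaultValue: "",
        extraInfo: "",
    };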

    GetCkafkaDatahubTaskTaskListTargetResourceMySqlParamDropCl

    DropClsLogSet string
    CLS logset ID.
    DropClsOwneruin string
    Account (owner UIN).
    DropClsRegion string
    The CLS region to deliver to.
    DropClsTopicId string
    CLS topic ID.
    DropInvalidMessageToCls bool
    Whether to deliver to CLS.
    DropClsLogSet string
    CLS logset ID.
    DropClsOwneruin string
    Account (owner UIN).
    DropClsRegion string
    The CLS region to deliver to.
    DropClsTopicId string
    CLS topic ID.
    DropInvalidMessageToCls bool
    Whether to deliver to CLS.
    dropClsLogSet String
    CLS logset ID.
    dropClsOwneruin String
    Account (owner UIN).
    dropClsRegion String
    The CLS region to deliver to.
    dropClsTopicId String
    CLS topic ID.
    dropInvalidMessageToCls Boolean
    Whether to deliver to CLS.
    dropClsLogSet string
    CLS logset ID.
    dropClsOwneruin string
    Account (owner UIN).
    dropClsRegion string
    The CLS region to deliver to.
    dropClsTopicId string
    CLS topic ID.
    dropInvalidMessageToCls boolean
    Whether to deliver to CLS.
    drop_cls_log_set str
    CLS logset ID.
    drop_cls_owneruin str
    Account (owner UIN).
    drop_cls_region str
    The CLS region to deliver to.
    drop_cls_topic_id str
    CLS topic ID.
    drop_invalid_message_to_cls bool
    Whether to deliver to CLS.
    dropClsLogSet String
    CLS logset ID.
    dropClsOwneruin String
    Account (owner UIN).
    dropClsRegion String
    The CLS region to deliver to.
    dropClsTopicId String
    CLS topic ID.
    dropInvalidMessageToCls Boolean
    Whether to deliver to CLS.
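    The interaction between DropInvalidMessageToCls and DropInvalidMessage is easy to misread, so a sketch may help; all IDs below are placeholders.

    // When dropInvalidMessageToCls is true, unparseable messages are delivered
    // to the CLS topic below and the separate DropInvalidMessage flag is ignored.
    const dropCls = {
        dropInvalidMessageToCls: true,
        dropClsRegion: "ap-guangzhou",    // region of the CLS topic (placeholder)
        dropClsLogSet: "logset-xxxxxxxx", // placeholder logset ID
        dropClsTopicId: "topic-xxxxxxxx", // placeholder CLS topic ID
        dropClsOwneruin: "100000000001",  // placeholder account UIN
    };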

    GetCkafkaDatahubTaskTaskListTargetResourcePostgreSqlParam

    DataFormat string
    Upstream data format (JSON or Debezium); required when the database synchronization mode matches the default field.
    DataTargetInsertMode string
    INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
    DataTargetPrimaryKeyField string
    When DataInsertMode=UPSERT, pass in the primary key that the upsert depends on.
    DataTargetRecordMappings List<GetCkafkaDatahubTaskTaskListTargetResourcePostgreSqlParamDataTargetRecordMapping>
    Mapping relationship between tables and messages.
    Database string
    PostgreSQL database name.
    DropInvalidMessage bool
    Whether to discard messages that fail to parse; defaults to true.
    IsTableRegular bool
    Whether the input table is a regular expression.
    KeyColumns string
    Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
    PluginName string
    Logical decoding plugin (decoderbufs or pgoutput); defaults to decoderbufs.
    RecordWithSchema bool
    If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
    Resource string
    Resource.
    SnapshotMode string
    Snapshot mode: schema_only or initial; defaults to initial.
    Table string
    PostgreSQL table; a non-system table in any of the monitored databases. Use , (comma) to monitor multiple tables; each table must be written as database name.table name.
    DataFormat string
    Upstream data format (JSON or Debezium); required when the database synchronization mode matches the default field.
    DataTargetInsertMode string
    INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
    DataTargetPrimaryKeyField string
    When DataInsertMode=UPSERT, pass in the primary key that the upsert depends on.
    DataTargetRecordMappings []GetCkafkaDatahubTaskTaskListTargetResourcePostgreSqlParamDataTargetRecordMapping
    Mapping relationship between tables and messages.
    Database string
    PostgreSQL database name.
    DropInvalidMessage bool
    Whether to discard messages that fail to parse; defaults to true.
    IsTableRegular bool
    Whether the input table is a regular expression.
    KeyColumns string
    Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
    PluginName string
    Logical decoding plugin (decoderbufs or pgoutput); defaults to decoderbufs.
    RecordWithSchema bool
    If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
    Resource string
    Resource.
    SnapshotMode string
    Snapshot mode: schema_only or initial; defaults to initial.
    Table string
    PostgreSQL table; a non-system table in any of the monitored databases. Use , (comma) to monitor multiple tables; each table must be written as database name.table name.
    dataFormat String
    Upstream data format (JSON or Debezium); required when the database synchronization mode matches the default field.
    dataTargetInsertMode String
    INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
    dataTargetPrimaryKeyField String
    When DataInsertMode=UPSERT, pass in the primary key that the upsert depends on.
    dataTargetRecordMappings List<GetCkafkaDatahubTaskTaskListTargetResourcePostgreSqlParamDataTargetRecordMapping>
    Mapping relationship between tables and messages.
    database String
    PostgreSQL database name.
    dropInvalidMessage Boolean
    Whether to discard messages that fail to parse; defaults to true.
    isTableRegular Boolean
    Whether the input table is a regular expression.
    keyColumns String
    Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
    pluginName String
    Logical decoding plugin (decoderbufs or pgoutput); defaults to decoderbufs.
    recordWithSchema Boolean
    If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
    resource String
    Resource.
    snapshotMode String
    Snapshot mode: schema_only or initial; defaults to initial.
    table String
    PostgreSQL table; a non-system table in any of the monitored databases. Use , (comma) to monitor multiple tables; each table must be written as database name.table name.
    dataFormat string
    Upstream data format (JSON or Debezium); required when the database synchronization mode matches the default field.
    dataTargetInsertMode string
    INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
    dataTargetPrimaryKeyField string
    When DataInsertMode=UPSERT, pass in the primary key that the upsert depends on.
    dataTargetRecordMappings GetCkafkaDatahubTaskTaskListTargetResourcePostgreSqlParamDataTargetRecordMapping[]
    Mapping relationship between tables and messages.
    database string
    PostgreSQL database name.
    dropInvalidMessage boolean
    Whether to discard messages that fail to parse; defaults to true.
    isTableRegular boolean
    Whether the input table is a regular expression.
    keyColumns string
    Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
    pluginName string
    Logical decoding plugin (decoderbufs or pgoutput); defaults to decoderbufs.
    recordWithSchema boolean
    If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
    resource string
    Resource.
    snapshotMode string
    Snapshot mode: schema_only or initial; defaults to initial.
    table string
    PostgreSQL table; a non-system table in any of the monitored databases. Use , (comma) to monitor multiple tables; each table must be written as database name.table name.
    data_format str
    Upstream data format (JSON or Debezium); required when the database synchronization mode matches the default field.
    data_target_insert_mode str
    INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
    data_target_primary_key_field str
    When DataInsertMode=UPSERT, pass in the primary key that the upsert depends on.
    data_target_record_mappings Sequence[GetCkafkaDatahubTaskTaskListTargetResourcePostgreSqlParamDataTargetRecordMapping]
    Mapping relationship between tables and messages.
    database str
    PostgreSQL database name.
    drop_invalid_message bool
    Whether to discard messages that fail to parse; defaults to true.
    is_table_regular bool
    Whether the input table is a regular expression.
    key_columns str
    Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
    plugin_name str
    Logical decoding plugin (decoderbufs or pgoutput); defaults to decoderbufs.
    record_with_schema bool
    If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
    resource str
    Resource.
    snapshot_mode str
    Snapshot mode: schema_only or initial; defaults to initial.
    table str
    PostgreSQL table; a non-system table in any of the monitored databases. Use , (comma) to monitor multiple tables; each table must be written as database name.table name.
    dataFormat String
    Upstream data format (JSON or Debezium); required when the database synchronization mode matches the default field.
    dataTargetInsertMode String
    INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
    dataTargetPrimaryKeyField String
    When DataInsertMode=UPSERT, pass in the primary key that the upsert depends on.
    dataTargetRecordMappings List<Property Map>
    Mapping relationship between tables and messages.
    database String
    PostgreSQL database name.
    dropInvalidMessage Boolean
    Whether to discard messages that fail to parse; defaults to true.
    isTableRegular Boolean
    Whether the input table is a regular expression.
    keyColumns String
    Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
    pluginName String
    Logical decoding plugin (decoderbufs or pgoutput); defaults to decoderbufs.
    recordWithSchema Boolean
    If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
    resource String
    Resource.
    snapshotMode String
    Snapshot mode: schema_only or initial; defaults to initial.
    table String
    PostgreSQL table; a non-system table in any of the monitored databases. Use , (comma) to monitor multiple tables; each table must be written as database name.table name.
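    As with the MySQL block, a hypothetical object can make the PostgreSQL defaults concrete. Field names are the documented camelCase properties; all values are placeholders, and the comments only restate defaults given in the table above.

    const postgreSqlParam = {
        database: "appdb",
        table: "appdb.orders,appdb.customers", // comma-separated database name.table name entries
        pluginName: "decoderbufs",  // or "pgoutput"; decoderbufs is the default
        snapshotMode: "initial",    // or "schema_only"; initial is the default
        dataFormat: "Debezium",     // or "JSON"
        dropInvalidMessage: true,   // default true
        isTableRegular: false,
        recordWithSchema: false,
        resource: "postgres-xxxxxxxx", // placeholder resource ID
    };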

    GetCkafkaDatahubTaskTaskListTargetResourcePostgreSqlParamDataTargetRecordMapping

    AllowNull bool
    Whether the message is allowed to be empty.
    AutoIncrement bool
    Whether it is an auto-increment column.
    ColumnName string
    Column name.
    ColumnSize string
    Current column size.
    DecimalDigits string
    Current column decimal digits.
    DefaultValue string
    Database table default parameters.
    ExtraInfo string
    Database table extra fields.
    JsonKey string
    The key name of the message.
    Type string
    Resource Type.
    AllowNull bool
    Whether the message is allowed to be empty.
    AutoIncrement bool
    Whether it is an auto-increment column.
    ColumnName string
    Column name.
    ColumnSize string
    Current column size.
    DecimalDigits string
    Current column decimal digits.
    DefaultValue string
    Database table default parameters.
    ExtraInfo string
    Database table extra fields.
    JsonKey string
    The key name of the message.
    Type string
    Resource Type.
    allowNull Boolean
    Whether the message is allowed to be empty.
    autoIncrement Boolean
    Whether it is an auto-increment column.
    columnName String
    Column name.
    columnSize String
    Current column size.
    decimalDigits String
    Current column decimal digits.
    defaultValue String
    Database table default parameters.
    extraInfo String
    Database table extra fields.
    jsonKey String
    The key name of the message.
    type String
    Resource Type.
    allowNull boolean
    Whether the message is allowed to be empty.
    autoIncrement boolean
    Whether it is an auto-increment column.
    columnName string
    Column name.
    columnSize string
    Current column size.
    decimalDigits string
    Current column decimal digits.
    defaultValue string
    Database table default parameters.
    extraInfo string
    Database table extra fields.
    jsonKey string
    The key name of the message.
    type string
    Resource Type.
    allow_null bool
    Whether the message is allowed to be empty.
    auto_increment bool
    Whether it is an auto-increment column.
    column_name str
    Column name.
    column_size str
    Current column size.
    decimal_digits str
    Current column decimal digits.
    default_value str
    Database table default parameters.
    extra_info str
    Database table extra fields.
    json_key str
    The key name of the message.
    type str
    Resource Type.
    allowNull Boolean
    Whether the message is allowed to be empty.
    autoIncrement Boolean
    Whether it is an auto-increment column.
    columnName String
    Column name.
    columnSize String
    Current column size.
    decimalDigits String
    Current column decimal digits.
    defaultValue String
    Database table default parameters.
    extraInfo String
    Database table extra fields.
    jsonKey String
    The key name of the message.
    type String
    Resource Type.

    GetCkafkaDatahubTaskTaskListTargetResourceScfParam

    BatchSize double
    The maximum number of messages sent in each batch; defaults to 1000.
    FunctionName string
    SCF function name.
    MaxRetries double
    The number of retries after an SCF call fails; defaults to 5.
    Namespace string
    SCF cloud function namespace; defaults to default.
    Qualifier string
    SCF cloud function version or alias; defaults to DEFAULT.
    BatchSize float64
    The maximum number of messages sent in each batch; defaults to 1000.
    FunctionName string
    SCF function name.
    MaxRetries float64
    The number of retries after an SCF call fails; defaults to 5.
    Namespace string
    SCF cloud function namespace; defaults to default.
    Qualifier string
    SCF cloud function version or alias; defaults to DEFAULT.
    batchSize Double
    The maximum number of messages sent in each batch; defaults to 1000.
    functionName String
    SCF function name.
    maxRetries Double
    The number of retries after an SCF call fails; defaults to 5.
    namespace String
    SCF cloud function namespace; defaults to default.
    qualifier String
    SCF cloud function version or alias; defaults to DEFAULT.
    batchSize number
    The maximum number of messages sent in each batch; defaults to 1000.
    functionName string
    SCF function name.
    maxRetries number
    The number of retries after an SCF call fails; defaults to 5.
    namespace string
    SCF cloud function namespace; defaults to default.
    qualifier string
    SCF cloud function version or alias; defaults to DEFAULT.
    batch_size float
    The maximum number of messages sent in each batch; defaults to 1000.
    function_name str
    SCF function name.
    max_retries float
    The number of retries after an SCF call fails; defaults to 5.
    namespace str
    SCF cloud function namespace; defaults to default.
    qualifier str
    SCF cloud function version or alias; defaults to DEFAULT.
    batchSize Number
    The maximum number of messages sent in each batch; defaults to 1000.
    functionName String
    SCF function name.
    maxRetries Number
    The number of retries after an SCF call fails; defaults to 5.
    namespace String
    SCF cloud function namespace; defaults to default.
    qualifier String
    SCF cloud function version or alias; defaults to DEFAULT.
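    The SCF defaults above can be restated as a small object; only the function name is a placeholder.

    // Defaults per the table above; functionName is a placeholder.
    const scfParam = {
        functionName: "my-consumer-fn",
        namespace: "default", // default namespace
        qualifier: "DEFAULT", // default version/alias
        batchSize: 1000,      // max messages per batch (default)
        maxRetries: 5,        // retries after a failed call (default)
    };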

    GetCkafkaDatahubTaskTaskListTargetResourceSqlServerParam

    Database string
    SQLServer database name.
    Resource string
    Resource.
    SnapshotMode string
    Snapshot mode: schema_only or initial; defaults to initial.
    Table string
    SQLServer table; a non-system table in any of the monitored databases. Use , (comma) to monitor multiple tables; each table must be written as database name.table name.
    Database string
    SQLServer database name.
    Resource string
    Resource.
    SnapshotMode string
    Snapshot mode: schema_only or initial; defaults to initial.
    Table string
    SQLServer table; a non-system table in any of the monitored databases. Use , (comma) to monitor multiple tables; each table must be written as database name.table name.
    database String
    SQLServer database name.
    resource String
    Resource.
    snapshotMode String
    Snapshot mode: schema_only or initial; defaults to initial.
    table String
    SQLServer table; a non-system table in any of the monitored databases. Use , (comma) to monitor multiple tables; each table must be written as database name.table name.
    database string
    SQLServer database name.
    resource string
    Resource.
    snapshotMode string
    Snapshot mode: schema_only or initial; defaults to initial.
    table string
    SQLServer table; a non-system table in any of the monitored databases. Use , (comma) to monitor multiple tables; each table must be written as database name.table name.
    database str
    SQLServer database name.
    resource str
    Resource.
    snapshot_mode str
    Snapshot mode: schema_only or initial; defaults to initial.
    table str
    SQLServer table; a non-system table in any of the monitored databases. Use , (comma) to monitor multiple tables; each table must be written as database name.table name.
    database String
    SQLServer database name.
    resource String
    Resource.
    snapshotMode String
    Snapshot mode: schema_only or initial; defaults to initial.
    table String
    SQLServer table; a non-system table in any of the monitored databases. Use , (comma) to monitor multiple tables; each table must be written as database name.table name.
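    The Table format, a comma-separated list of database name.table name entries, looks like this in practice; the names are invented.

    // Two monitored tables, comma-separated, each written as
    // "database name.table name".
    const sqlServerTable = ["salesdb.orders", "salesdb.customers"].join(",");
    // => "salesdb.orders,salesdb.customers"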

    GetCkafkaDatahubTaskTaskListTargetResourceTdwParam

    Bid string
    TDW bid.
    IsDomestic bool
    Defaults to true.
    TdwHost string
    TDW address; defaults to tl-tdbank-tdmanager.tencent-distribute.com.
    TdwPort double
    TDW port; defaults to 8099.
    Tid string
    TDW tid.
    Bid string
    TDW bid.
    IsDomestic bool
    Defaults to true.
    TdwHost string
    TDW address; defaults to tl-tdbank-tdmanager.tencent-distribute.com.
    TdwPort float64
    TDW port; defaults to 8099.
    Tid string
    TDW tid.
    bid String
    TDW bid.
    isDomestic Boolean
    Defaults to true.
    tdwHost String
    TDW address; defaults to tl-tdbank-tdmanager.tencent-distribute.com.
    tdwPort Double
    TDW port; defaults to 8099.
    tid String
    TDW tid.
    bid string
    TDW bid.
    isDomestic boolean
    Defaults to true.
    tdwHost string
    TDW address; defaults to tl-tdbank-tdmanager.tencent-distribute.com.
    tdwPort number
    TDW port; defaults to 8099.
    tid string
    TDW tid.
    bid str
    TDW bid.
    is_domestic bool
    Defaults to true.
    tdw_host str
    TDW address; defaults to tl-tdbank-tdmanager.tencent-distribute.com.
    tdw_port float
    TDW port; defaults to 8099.
    tid str
    TDW tid.
    bid String
    TDW bid.
    isDomestic Boolean
    Defaults to true.
    tdwHost String
    TDW address; defaults to tl-tdbank-tdmanager.tencent-distribute.com.
    tdwPort Number
    TDW port; defaults to 8099.
    tid String
    TDW tid.
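    A TDW target with the documented defaults spelled out; bid and tid are placeholders.

    const tdwParam = {
        bid: "b_placeholder", // TDW bid (placeholder)
        tid: "t_placeholder", // TDW tid (placeholder)
        tdwHost: "tl-tdbank-tdmanager.tencent-distribute.com", // default address
        tdwPort: 8099,        // default port
        isDomestic: true,     // default
    };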

    GetCkafkaDatahubTaskTaskListTargetResourceTopicParam

    CompressionType string
    Compression applied when writing to the topic: fill in none to disable it, or one of gzip, snappy, and lz4 to enable it.
    MsgMultiple double
    One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable when CKafka flows into CKafka).
    OffsetType string
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    Resource string
    Resource.
    StartTime double
    Required when the offset type is timestamp; a Unix timestamp accurate to the second.
    TopicId string
    The TopicId of the topic.
    UseAutoCreateTopic bool
    Whether the topic used needs to be created automatically (currently only supported for SOURCE inflow tasks).
    CompressionType string
    Compression applied when writing to the topic: fill in none to disable it, or one of gzip, snappy, and lz4 to enable it.
    MsgMultiple float64
    One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable when CKafka flows into CKafka).
    OffsetType string
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    Resource string
    Resource.
    StartTime float64
    Required when the offset type is timestamp; a Unix timestamp accurate to the second.
    TopicId string
    The TopicId of the topic.
    UseAutoCreateTopic bool
    Whether the topic used needs to be created automatically (currently only supported for SOURCE inflow tasks).
    compressionType String
    Compression applied when writing to the topic: fill in none to disable it, or one of gzip, snappy, and lz4 to enable it.
    msgMultiple Double
    One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable when CKafka flows into CKafka).
    offsetType String
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    resource String
    Resource.
    startTime Double
    Required when the offset type is timestamp; a Unix timestamp accurate to the second.
    topicId String
    The TopicId of the topic.
    useAutoCreateTopic Boolean
    Whether the topic used needs to be created automatically (currently only supported for SOURCE inflow tasks).
    compressionType string
    Compression applied when writing to the topic: fill in none to disable it, or one of gzip, snappy, and lz4 to enable it.
    msgMultiple number
    One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable when CKafka flows into CKafka).
    offsetType string
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    resource string
    Resource.
    startTime number
    Required when the offset type is timestamp; a Unix timestamp accurate to the second.
    topicId string
    The TopicId of the topic.
    useAutoCreateTopic boolean
    Whether the topic used needs to be created automatically (currently only supported for SOURCE inflow tasks).
    compression_type str
    Compression applied when writing to the topic: fill in none to disable it, or one of gzip, snappy, and lz4 to enable it.
    msg_multiple float
    One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable when CKafka flows into CKafka).
    offset_type str
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    resource str
    Resource.
    start_time float
    Required when the offset type is timestamp; a Unix timestamp accurate to the second.
    topic_id str
    The TopicId of the topic.
    use_auto_create_topic bool
    Whether the topic used needs to be created automatically (currently only supported for SOURCE inflow tasks).
    compressionType String
    Compression applied when writing to the topic: fill in none to disable it, or one of gzip, snappy, and lz4 to enable it.
    msgMultiple Number
    One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable when CKafka flows into CKafka).
    offsetType String
    Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
    resource String
    Resource.
    startTime Number
    Required when the offset type is timestamp; a Unix timestamp accurate to the second.
    topicId String
    The TopicId of the topic.
    useAutoCreateTopic Boolean
    Whether the topic used needs to be created automatically (currently only supported for SOURCE inflow tasks).
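    When the offset type is timestamp, StartTime must be a second-precision Unix timestamp. A short sketch, with the resource ID as a placeholder:

    // Consume from a point in time: offsetType "timestamp" plus a Unix
    // timestamp in seconds (not milliseconds).
    const topicParam = {
        resource: "topic-xxxxxxxx", // placeholder topic resource
        offsetType: "timestamp",
        startTime: Math.floor(Date.now() / 1000) - 3600, // one hour ago, in seconds
        compressionType: "lz4",     // none | gzip | snappy | lz4
        msgMultiple: 1,
        useAutoCreateTopic: false,
    };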

    Package Details

    Repository
    tencentcloud tencentcloudstack/terraform-provider-tencentcloud
    License
    Notes
    This Pulumi package is based on the tencentcloud Terraform Provider.