tencentcloud 1.81.189 published on Wednesday, Apr 30, 2025 by tencentcloudstack
tencentcloud.getCkafkaDatahubTask
Explore with Pulumi AI
tencentcloud 1.81.189 published on Wednesday, Apr 30, 2025 by tencentcloudstack
Use this data source to query detailed information of ckafka datahub_task
Example Usage
// Example: invoke the getCkafkaDatahubTask data source with no filter
// arguments (all arguments are optional; an empty object applies no filters).
import * as pulumi from "@pulumi/pulumi";
import * as tencentcloud from "@pulumi/tencentcloud";
const datahubTask = tencentcloud.getCkafkaDatahubTask({});
# Example: invoke the get_ckafka_datahub_task data source with no filter
# arguments (all arguments are optional keyword parameters).
import pulumi
import pulumi_tencentcloud as tencentcloud
datahub_task = tencentcloud.get_ckafka_datahub_task()
// Example: look up CKafka DataHub tasks with no filter arguments.
package main

import (
	"github.com/pulumi/pulumi-terraform-provider/sdks/go/tencentcloud/tencentcloud"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// Empty args struct applies no filters; nil invoke options use defaults.
		// The result is discarded here — in real code, use the returned value.
		_, err := tencentcloud.LookupCkafkaDatahubTask(ctx, &tencentcloud.LookupCkafkaDatahubTaskArgs{}, nil)
		if err != nil {
			return err
		}
		return nil
	})
}
// Example: query CKafka DataHub tasks with no filter arguments
// (Invoke() with no args returns an Output-wrapped result).
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Tencentcloud = Pulumi.Tencentcloud;

return await Deployment.RunAsync(() =>
{
    var datahubTask = Tencentcloud.GetCkafkaDatahubTask.Invoke();
});
// Example: query CKafka DataHub tasks with no filter arguments.
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.tencentcloud.TencentcloudFunctions;
import com.pulumi.tencentcloud.inputs.GetCkafkaDatahubTaskArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // No-arg overload applies no filters and returns all visible tasks.
        final var datahubTask = TencentcloudFunctions.getCkafkaDatahubTask();
    }
}
# Example: invoke the tencentcloud:getCkafkaDatahubTask data source with
# an empty arguments map (no filters applied).
variables:
  datahubTask:
    fn::invoke:
      function: tencentcloud:getCkafkaDatahubTask
      arguments: {}
Using getCkafkaDatahubTask
Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.
function getCkafkaDatahubTask(args: GetCkafkaDatahubTaskArgs, opts?: InvokeOptions): Promise<GetCkafkaDatahubTaskResult>
function getCkafkaDatahubTaskOutput(args: GetCkafkaDatahubTaskOutputArgs, opts?: InvokeOptions): Output<GetCkafkaDatahubTaskResult>
def get_ckafka_datahub_task(id: Optional[str] = None,
resource: Optional[str] = None,
result_output_file: Optional[str] = None,
search_word: Optional[str] = None,
source_type: Optional[str] = None,
target_type: Optional[str] = None,
task_type: Optional[str] = None,
opts: Optional[InvokeOptions] = None) -> GetCkafkaDatahubTaskResult
def get_ckafka_datahub_task_output(id: Optional[pulumi.Input[str]] = None,
resource: Optional[pulumi.Input[str]] = None,
result_output_file: Optional[pulumi.Input[str]] = None,
search_word: Optional[pulumi.Input[str]] = None,
source_type: Optional[pulumi.Input[str]] = None,
target_type: Optional[pulumi.Input[str]] = None,
task_type: Optional[pulumi.Input[str]] = None,
opts: Optional[InvokeOptions] = None) -> Output[GetCkafkaDatahubTaskResult]
func LookupCkafkaDatahubTask(ctx *Context, args *LookupCkafkaDatahubTaskArgs, opts ...InvokeOption) (*LookupCkafkaDatahubTaskResult, error)
func LookupCkafkaDatahubTaskOutput(ctx *Context, args *LookupCkafkaDatahubTaskOutputArgs, opts ...InvokeOption) LookupCkafkaDatahubTaskResultOutput
> Note: This function is named LookupCkafkaDatahubTask
in the Go SDK.
public static class GetCkafkaDatahubTask
{
public static Task<GetCkafkaDatahubTaskResult> InvokeAsync(GetCkafkaDatahubTaskArgs args, InvokeOptions? opts = null)
public static Output<GetCkafkaDatahubTaskResult> Invoke(GetCkafkaDatahubTaskInvokeArgs args, InvokeOptions? opts = null)
}
public static CompletableFuture<GetCkafkaDatahubTaskResult> getCkafkaDatahubTask(GetCkafkaDatahubTaskArgs args, InvokeOptions options)
public static Output<GetCkafkaDatahubTaskResult> getCkafkaDatahubTask(GetCkafkaDatahubTaskArgs args, InvokeOptions options)
fn::invoke:
function: tencentcloud:index/getCkafkaDatahubTask:getCkafkaDatahubTask
arguments:
# arguments dictionary
The following arguments are supported:
- Id string
- Resource string
- Resource.
- Result
Output stringFile - Used to save results.
- Search
Word string - search key.
- Source
Type string - The source type.
- Target
Type string - Destination type of dump.
- Task
Type string - Task type, SOURCE|SINK.
- Id string
- Resource string
- Resource.
- Result
Output stringFile - Used to save results.
- Search
Word string - search key.
- Source
Type string - The source type.
- Target
Type string - Destination type of dump.
- Task
Type string - Task type, SOURCE|SINK.
- id String
- resource String
- Resource.
- result
Output StringFile - Used to save results.
- search
Word String - search key.
- source
Type String - The source type.
- target
Type String - Destination type of dump.
- task
Type String - Task type, SOURCE|SINK.
- id string
- resource string
- Resource.
- result
Output stringFile - Used to save results.
- search
Word string - search key.
- source
Type string - The source type.
- target
Type string - Destination type of dump.
- task
Type string - Task type, SOURCE|SINK.
- id str
- resource str
- Resource.
- result_
output_ strfile - Used to save results.
- search_
word str - search key.
- source_
type str - The source type.
- target_
type str - Destination type of dump.
- task_
type str - Task type, SOURCE|SINK.
- id String
- resource String
- Resource.
- result
Output StringFile - Used to save results.
- search
Word String - search key.
- source
Type String - The source type.
- target
Type String - Destination type of dump.
- task
Type String - Task type, SOURCE|SINK.
getCkafkaDatahubTask Result
The following output properties are available:
- Id string
- Task
Lists List<GetCkafka Datahub Task Task List> - Datahub task information list.
- Resource string
- The topic name of the topic sold separately.
- Result
Output stringFile - Search
Word string - Source
Type string - Target
Type string - Task
Type string - TaskType, SOURCE|SINK.
- Id string
- Task
Lists []GetCkafka Datahub Task Task List - Datahub task information list.
- Resource string
- The topic name of the topic sold separately.
- Result
Output stringFile - Search
Word string - Source
Type string - Target
Type string - Task
Type string - TaskType, SOURCE|SINK.
- id String
- task
Lists List<GetCkafka Datahub Task Task List> - Datahub task information list.
- resource String
- The topic name of the topic sold separately.
- result
Output StringFile - search
Word String - source
Type String - target
Type String - task
Type String - TaskType, SOURCE|SINK.
- id string
- task
Lists GetCkafka Datahub Task Task List[] - Datahub task information list.
- resource string
- The topic name of the topic sold separately.
- result
Output stringFile - search
Word string - source
Type string - target
Type string - task
Type string - TaskType, SOURCE|SINK.
- id str
- task_
lists Sequence[GetCkafka Datahub Task Task List] - Datahub task information list.
- resource str
- The topic name of the topic sold separately.
- result_
output_ strfile - search_
word str - source_
type str - target_
type str - task_
type str - TaskType, SOURCE|SINK.
- id String
- task
Lists List<Property Map> - Datahub task information list.
- resource String
- The topic name of the topic sold separately.
- result
Output StringFile - search
Word String - source
Type String - target
Type String - task
Type String - TaskType, SOURCE|SINK.
Supporting Types
GetCkafkaDatahubTaskTaskList
- Create
Time string - CreateTime.
- Datahub
Id string - Datahub Id.
- Error
Message string - ErrorMessage.
- Source
Resources List<GetCkafka Datahub Task Task List Source Resource> - data resource.
- Status double
- Status, -1 failed to create, 0 to create, 1 to run, 2 to delete, 3 to deleted, 4 to delete failed, 5 to pause, 6 to pause, 7 to pause, 8 to resume, 9 to resume failed.
- Step
Lists List<string> - StepList.
- Target
Resources List<GetCkafka Datahub Task Task List Target Resource> - Target Resource.
- Task
Current stringStep - Task Current Step.
- Task
Id string - task ID.
- Task
Name string - TaskName.
- Task
Progress double - Creation progress percentage.
- Task
Type string - Task type, SOURCE|SINK.
- Create
Time string - CreateTime.
- Datahub
Id string - Datahub Id.
- Error
Message string - ErrorMessage.
- Source
Resources []GetCkafka Datahub Task Task List Source Resource - data resource.
- Status float64
- Status, -1 failed to create, 0 to create, 1 to run, 2 to delete, 3 to deleted, 4 to delete failed, 5 to pause, 6 to pause, 7 to pause, 8 to resume, 9 to resume failed.
- Step
Lists []string - StepList.
- Target
Resources []GetCkafka Datahub Task Task List Target Resource - Target Resource.
- Task
Current stringStep - Task Current Step.
- Task
Id string - task ID.
- Task
Name string - TaskName.
- Task
Progress float64 - Creation progress percentage.
- Task
Type string - Task type, SOURCE|SINK.
- create
Time String - CreateTime.
- datahub
Id String - Datahub Id.
- error
Message String - ErrorMessage.
- source
Resources List<GetCkafka Datahub Task Task List Source Resource> - data resource.
- status Double
- Status, -1 failed to create, 0 to create, 1 to run, 2 to delete, 3 to deleted, 4 to delete failed, 5 to pause, 6 to pause, 7 to pause, 8 to resume, 9 to resume failed.
- step
Lists List<String> - StepList.
- target
Resources List<GetCkafka Datahub Task Task List Target Resource> - Target Resource.
- task
Current StringStep - Task Current Step.
- task
Id String - task ID.
- task
Name String - TaskName.
- task
Progress Double - Creation progress percentage.
- task
Type String - Task type, SOURCE|SINK.
- create
Time string - CreateTime.
- datahub
Id string - Datahub Id.
- error
Message string - ErrorMessage.
- source
Resources GetCkafka Datahub Task Task List Source Resource[] - data resource.
- status number
- Status, -1 failed to create, 0 to create, 1 to run, 2 to delete, 3 to deleted, 4 to delete failed, 5 to pause, 6 to pause, 7 to pause, 8 to resume, 9 to resume failed.
- step
Lists string[] - StepList.
- target
Resources GetCkafka Datahub Task Task List Target Resource[] - Target Resource.
- task
Current stringStep - Task Current Step.
- task
Id string - task ID.
- task
Name string - TaskName.
- task
Progress number - Creation progress percentage.
- task
Type string - Task type, SOURCE|SINK.
- create_
time str - CreateTime.
- datahub_
id str - Datahub Id.
- error_
message str - ErrorMessage.
- source_
resources Sequence[GetCkafka Datahub Task Task List Source Resource] - data resource.
- status float
- Status, -1 failed to create, 0 to create, 1 to run, 2 to delete, 3 to deleted, 4 to delete failed, 5 to pause, 6 to pause, 7 to pause, 8 to resume, 9 to resume failed.
- step_
lists Sequence[str] - StepList.
- target_
resources Sequence[GetCkafka Datahub Task Task List Target Resource] - Target Resource.
- task_
current_ strstep - Task Current Step.
- task_
id str - task ID.
- task_
name str - TaskName.
- task_
progress float - Creation progress percentage.
- task_
type str - Task type, SOURCE|SINK.
- create
Time String - CreateTime.
- datahub
Id String - Datahub Id.
- error
Message String - ErrorMessage.
- source
Resources List<Property Map> - data resource.
- status Number
- Status, -1 failed to create, 0 to create, 1 to run, 2 to delete, 3 to deleted, 4 to delete failed, 5 to pause, 6 to pause, 7 to pause, 8 to resume, 9 to resume failed.
- step
Lists List<String> - StepList.
- target
Resources List<Property Map> - Target Resource.
- task
Current StringStep - Task Current Step.
- task
Id String - task ID.
- task
Name String - TaskName.
- task
Progress Number - Creation progress percentage.
- task
Type String - Task type, SOURCE|SINK.
GetCkafkaDatahubTaskTaskListSourceResource
- Click
House List<GetParams Ckafka Datahub Task Task List Source Resource Click House Param> - ClickHouse configuration, required when Type is CLICKHOUSE.
- Cls
Params List<GetCkafka Datahub Task Task List Source Resource Cls Param> - Cls configuration, Required when Type is CLS.
- Cos
Params List<GetCkafka Datahub Task Task List Source Resource Cos Param> - Cos configuration, required when Type is COS.
- Ctsdb
Params List<GetCkafka Datahub Task Task List Source Resource Ctsdb Param> - Ctsdb configuration, Required when Type is CTSDB.
- Dts
Params List<GetCkafka Datahub Task Task List Source Resource Dts Param> - Dts configuration, required when Type is DTS.
- Es
Params List<GetCkafka Datahub Task Task List Source Resource Es Param> - Es configuration, required when Type is ES.
- Event
Bus List<GetParams Ckafka Datahub Task Task List Source Resource Event Bus Param> - EB configuration, required when type is EB.
- Kafka
Params List<GetCkafka Datahub Task Task List Source Resource Kafka Param> - ckafka configuration, required when Type is KAFKA.
- Maria
Db List<GetParams Ckafka Datahub Task Task List Source Resource Maria Db Param> - MariaDB configuration, Required when Type is MARIADB.
- Mongo
Db List<GetParams Ckafka Datahub Task Task List Source Resource Mongo Db Param> - MongoDB config, Required when Type is MONGODB.
- My
Sql List<GetParams Ckafka Datahub Task Task List Source Resource My Sql Param> - MySQL configuration, Required when Type is MYSQL.
- Postgre
Sql List<GetParams Ckafka Datahub Task Task List Source Resource Postgre Sql Param> - PostgreSQL configuration, Required when Type is POSTGRESQL or TDSQL C_POSTGRESQL.
- Scf
Params List<GetCkafka Datahub Task Task List Source Resource Scf Param> - Scf configuration, Required when Type is SCF.
- Sql
Server List<GetParams Ckafka Datahub Task Task List Source Resource Sql Server Param> - SQLServer configuration, Required when Type is SQLSERVER.
- Tdw
Params List<GetCkafka Datahub Task Task List Source Resource Tdw Param> - Tdw configuration, required when Type is TDW.
- Topic
Params List<GetCkafka Datahub Task Task List Source Resource Topic Param> - Topic configuration, Required when Type is Topic.
- Type string
- Resource Type.
- Click
House []GetParams Ckafka Datahub Task Task List Source Resource Click House Param - ClickHouse configuration, required when Type is CLICKHOUSE.
- Cls
Params []GetCkafka Datahub Task Task List Source Resource Cls Param - Cls configuration, Required when Type is CLS.
- Cos
Params []GetCkafka Datahub Task Task List Source Resource Cos Param - Cos configuration, required when Type is COS.
- Ctsdb
Params []GetCkafka Datahub Task Task List Source Resource Ctsdb Param - Ctsdb configuration, Required when Type is CTSDB.
- Dts
Params []GetCkafka Datahub Task Task List Source Resource Dts Param - Dts configuration, required when Type is DTS.
- Es
Params []GetCkafka Datahub Task Task List Source Resource Es Param - Es configuration, required when Type is ES.
- Event
Bus []GetParams Ckafka Datahub Task Task List Source Resource Event Bus Param - EB configuration, required when type is EB.
- Kafka
Params []GetCkafka Datahub Task Task List Source Resource Kafka Param - ckafka configuration, required when Type is KAFKA.
- Maria
Db []GetParams Ckafka Datahub Task Task List Source Resource Maria Db Param - MariaDB configuration, Required when Type is MARIADB.
- Mongo
Db []GetParams Ckafka Datahub Task Task List Source Resource Mongo Db Param - MongoDB config, Required when Type is MONGODB.
- My
Sql []GetParams Ckafka Datahub Task Task List Source Resource My Sql Param - MySQL configuration, Required when Type is MYSQL.
- Postgre
Sql []GetParams Ckafka Datahub Task Task List Source Resource Postgre Sql Param - PostgreSQL configuration, Required when Type is POSTGRESQL or TDSQL C_POSTGRESQL.
- Scf
Params []GetCkafka Datahub Task Task List Source Resource Scf Param - Scf configuration, Required when Type is SCF.
- Sql
Server []GetParams Ckafka Datahub Task Task List Source Resource Sql Server Param - SQLServer configuration, Required when Type is SQLSERVER.
- Tdw
Params []GetCkafka Datahub Task Task List Source Resource Tdw Param - Tdw configuration, required when Type is TDW.
- Topic
Params []GetCkafka Datahub Task Task List Source Resource Topic Param - Topic configuration, Required when Type is Topic.
- Type string
- Resource Type.
- click
House List<GetParams Ckafka Datahub Task Task List Source Resource Click House Param> - ClickHouse configuration, required when Type is CLICKHOUSE.
- cls
Params List<GetCkafka Datahub Task Task List Source Resource Cls Param> - Cls configuration, Required when Type is CLS.
- cos
Params List<GetCkafka Datahub Task Task List Source Resource Cos Param> - Cos configuration, required when Type is COS.
- ctsdb
Params List<GetCkafka Datahub Task Task List Source Resource Ctsdb Param> - Ctsdb configuration, Required when Type is CTSDB.
- dts
Params List<GetCkafka Datahub Task Task List Source Resource Dts Param> - Dts configuration, required when Type is DTS.
- es
Params List<GetCkafka Datahub Task Task List Source Resource Es Param> - Es configuration, required when Type is ES.
- event
Bus List<GetParams Ckafka Datahub Task Task List Source Resource Event Bus Param> - EB configuration, required when type is EB.
- kafka
Params List<GetCkafka Datahub Task Task List Source Resource Kafka Param> - ckafka configuration, required when Type is KAFKA.
- maria
Db List<GetParams Ckafka Datahub Task Task List Source Resource Maria Db Param> - MariaDB configuration, Required when Type is MARIADB.
- mongo
Db List<GetParams Ckafka Datahub Task Task List Source Resource Mongo Db Param> - MongoDB config, Required when Type is MONGODB.
- my
Sql List<GetParams Ckafka Datahub Task Task List Source Resource My Sql Param> - MySQL configuration, Required when Type is MYSQL.
- postgre
Sql List<GetParams Ckafka Datahub Task Task List Source Resource Postgre Sql Param> - PostgreSQL configuration, Required when Type is POSTGRESQL or TDSQL C_POSTGRESQL.
- scf
Params List<GetCkafka Datahub Task Task List Source Resource Scf Param> - Scf configuration, Required when Type is SCF.
- sql
Server List<GetParams Ckafka Datahub Task Task List Source Resource Sql Server Param> - SQLServer configuration, Required when Type is SQLSERVER.
- tdw
Params List<GetCkafka Datahub Task Task List Source Resource Tdw Param> - Tdw configuration, required when Type is TDW.
- topic
Params List<GetCkafka Datahub Task Task List Source Resource Topic Param> - Topic configuration, Required when Type is Topic.
- type String
- Resource Type.
- click
House GetParams Ckafka Datahub Task Task List Source Resource Click House Param[] - ClickHouse configuration, required when Type is CLICKHOUSE.
- cls
Params GetCkafka Datahub Task Task List Source Resource Cls Param[] - Cls configuration, Required when Type is CLS.
- cos
Params GetCkafka Datahub Task Task List Source Resource Cos Param[] - Cos configuration, required when Type is COS.
- ctsdb
Params GetCkafka Datahub Task Task List Source Resource Ctsdb Param[] - Ctsdb configuration, Required when Type is CTSDB.
- dts
Params GetCkafka Datahub Task Task List Source Resource Dts Param[] - Dts configuration, required when Type is DTS.
- es
Params GetCkafka Datahub Task Task List Source Resource Es Param[] - Es configuration, required when Type is ES.
- event
Bus GetParams Ckafka Datahub Task Task List Source Resource Event Bus Param[] - EB configuration, required when type is EB.
- kafka
Params GetCkafka Datahub Task Task List Source Resource Kafka Param[] - ckafka configuration, required when Type is KAFKA.
- maria
Db GetParams Ckafka Datahub Task Task List Source Resource Maria Db Param[] - MariaDB configuration, Required when Type is MARIADB.
- mongo
Db GetParams Ckafka Datahub Task Task List Source Resource Mongo Db Param[] - MongoDB config, Required when Type is MONGODB.
- my
Sql GetParams Ckafka Datahub Task Task List Source Resource My Sql Param[] - MySQL configuration, Required when Type is MYSQL.
- postgre
Sql GetParams Ckafka Datahub Task Task List Source Resource Postgre Sql Param[] - PostgreSQL configuration, Required when Type is POSTGRESQL or TDSQL C_POSTGRESQL.
- scf
Params GetCkafka Datahub Task Task List Source Resource Scf Param[] - Scf configuration, Required when Type is SCF.
- sql
Server GetParams Ckafka Datahub Task Task List Source Resource Sql Server Param[] - SQLServer configuration, Required when Type is SQLSERVER.
- tdw
Params GetCkafka Datahub Task Task List Source Resource Tdw Param[] - Tdw configuration, required when Type is TDW.
- topic
Params GetCkafka Datahub Task Task List Source Resource Topic Param[] - Topic configuration, Required when Type is Topic.
- type string
- Resource Type.
- click_
house_ Sequence[Getparams Ckafka Datahub Task Task List Source Resource Click House Param] - ClickHouse configuration, required when Type is CLICKHOUSE.
- cls_
params Sequence[GetCkafka Datahub Task Task List Source Resource Cls Param] - Cls configuration, Required when Type is CLS.
- cos_
params Sequence[GetCkafka Datahub Task Task List Source Resource Cos Param] - Cos configuration, required when Type is COS.
- ctsdb_
params Sequence[GetCkafka Datahub Task Task List Source Resource Ctsdb Param] - Ctsdb configuration, Required when Type is CTSDB.
- dts_
params Sequence[GetCkafka Datahub Task Task List Source Resource Dts Param] - Dts configuration, required when Type is DTS.
- es_
params Sequence[GetCkafka Datahub Task Task List Source Resource Es Param] - Es configuration, required when Type is ES.
- event_
bus_ Sequence[Getparams Ckafka Datahub Task Task List Source Resource Event Bus Param] - EB configuration, required when type is EB.
- kafka_
params Sequence[GetCkafka Datahub Task Task List Source Resource Kafka Param] - ckafka configuration, required when Type is KAFKA.
- maria_
db_ Sequence[Getparams Ckafka Datahub Task Task List Source Resource Maria Db Param] - MariaDB configuration, Required when Type is MARIADB.
- mongo_
db_ Sequence[Getparams Ckafka Datahub Task Task List Source Resource Mongo Db Param] - MongoDB config, Required when Type is MONGODB.
- my_
sql_ Sequence[Getparams Ckafka Datahub Task Task List Source Resource My Sql Param] - MySQL configuration, Required when Type is MYSQL.
- postgre_
sql_ Sequence[Getparams Ckafka Datahub Task Task List Source Resource Postgre Sql Param] - PostgreSQL configuration, Required when Type is POSTGRESQL or TDSQL C_POSTGRESQL.
- scf_
params Sequence[GetCkafka Datahub Task Task List Source Resource Scf Param] - Scf configuration, Required when Type is SCF.
- sql_
server_ Sequence[Getparams Ckafka Datahub Task Task List Source Resource Sql Server Param] - SQLServer configuration, Required when Type is SQLSERVER.
- tdw_
params Sequence[GetCkafka Datahub Task Task List Source Resource Tdw Param] - Tdw configuration, required when Type is TDW.
- topic_
params Sequence[GetCkafka Datahub Task Task List Source Resource Topic Param] - Topic configuration, Required when Type is Topic.
- type str
- Resource Type.
- click
House List<Property Map>Params - ClickHouse configuration, required when Type is CLICKHOUSE.
- cls
Params List<Property Map> - Cls configuration, Required when Type is CLS.
- cos
Params List<Property Map> - Cos configuration, required when Type is COS.
- ctsdb
Params List<Property Map> - Ctsdb configuration, Required when Type is CTSDB.
- dts
Params List<Property Map> - Dts configuration, required when Type is DTS.
- es
Params List<Property Map> - Es configuration, required when Type is ES.
- event
Bus List<Property Map>Params - EB configuration, required when type is EB.
- kafka
Params List<Property Map> - ckafka configuration, required when Type is KAFKA.
- maria
Db List<Property Map>Params - MariaDB configuration, Required when Type is MARIADB.
- mongo
Db List<Property Map>Params - MongoDB config, Required when Type is MONGODB.
- my
Sql List<Property Map>Params - MySQL configuration, Required when Type is MYSQL.
- postgre
Sql List<Property Map>Params - PostgreSQL configuration, Required when Type is POSTGRESQL or TDSQL C_POSTGRESQL.
- scf
Params List<Property Map> - Scf configuration, Required when Type is SCF.
- sql
Server List<Property Map>Params - SQLServer configuration, Required when Type is SQLSERVER.
- tdw
Params List<Property Map> - Tdw configuration, required when Type is TDW.
- topic
Params List<Property Map> - Topic configuration, Required when Type is Topic.
- type String
- Resource Type.
GetCkafkaDatahubTaskTaskListSourceResourceClickHouseParam
- Cluster string
- ClickHouse cluster.
- Database string
- SQLServer database name.
- Drop
Cls List<GetCkafka Datahub Task Task List Source Resource Click House Param Drop Cl> - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- Drop
Invalid boolMessage - Whether to discard messages that fail to parse, the default is true.
- Ip string
- Mongo DB connection ip.
- Password string
- MongoDB database password.
- Port double
- MongoDB connection port.
- Resource string
- Resource.
- Schemas
List<Get
Ckafka Datahub Task Task List Source Resource Click House Param Schema> - ClickHouse schema.
- Self
Built bool - Whether it is a self-built cluster.
- Service
Vip string - instance vip.
- Table string
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- Type string
- Resource Type.
- Uniq
Vpc stringId - instance vpc id.
- User
Name string - MongoDB database user name.
- Cluster string
- ClickHouse cluster.
- Database string
- SQLServer database name.
- Drop
Cls []GetCkafka Datahub Task Task List Source Resource Click House Param Drop Cl - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- Drop
Invalid boolMessage - Whether to discard messages that fail to parse, the default is true.
- Ip string
- Mongo DB connection ip.
- Password string
- MongoDB database password.
- Port float64
- MongoDB connection port.
- Resource string
- Resource.
- Schemas
[]Get
Ckafka Datahub Task Task List Source Resource Click House Param Schema - ClickHouse schema.
- Self
Built bool - Whether it is a self-built cluster.
- Service
Vip string - instance vip.
- Table string
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- Type string
- Resource Type.
- Uniq
Vpc stringId - instance vpc id.
- User
Name string - MongoDB database user name.
- cluster String
- ClickHouse cluster.
- database String
- SQLServer database name.
- drop
Cls List<GetCkafka Datahub Task Task List Source Resource Click House Param Drop Cl> - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- drop
Invalid BooleanMessage - Whether to discard messages that fail to parse, the default is true.
- ip String
- Mongo DB connection ip.
- password String
- MongoDB database password.
- port Double
- MongoDB connection port.
- resource String
- Resource.
- schemas
List<Get
Ckafka Datahub Task Task List Source Resource Click House Param Schema> - ClickHouse schema.
- self
Built Boolean - Whether it is a self-built cluster.
- service
Vip String - instance vip.
- table String
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- type String
- Resource Type.
- uniq
Vpc StringId - instance vpc id.
- user
Name String - MongoDB database user name.
- cluster string
- ClickHouse cluster.
- database string
- SQLServer database name.
- drop
Cls GetCkafka Datahub Task Task List Source Resource Click House Param Drop Cl[] - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- drop
Invalid booleanMessage - Whether to discard messages that fail to parse, the default is true.
- ip string
- Mongo DB connection ip.
- password string
- MongoDB database password.
- port number
- MongoDB connection port.
- resource string
- Resource.
- schemas
Get
Ckafka Datahub Task Task List Source Resource Click House Param Schema[] - ClickHouse schema.
- self
Built boolean - Whether it is a self-built cluster.
- service
Vip string - instance vip.
- table string
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- type string
- Resource Type.
- uniq
Vpc stringId - instance vpc id.
- user
Name string - MongoDB database user name.
- cluster str
- ClickHouse cluster.
- database str
- SQLServer database name.
- drop_
cls Sequence[GetCkafka Datahub Task Task List Source Resource Click House Param Drop Cl] - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- drop_
invalid_ boolmessage - Whether to discard messages that fail to parse, the default is true.
- ip str
- Mongo DB connection ip.
- password str
- MongoDB database password.
- port float
- MongoDB connection port.
- resource str
- Resource.
- schemas
Sequence[Get
Ckafka Datahub Task Task List Source Resource Click House Param Schema] - ClickHouse schema.
- self_
built bool - Whether it is a self-built cluster.
- service_
vip str - instance vip.
- table str
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- type str
- Resource Type.
- uniq_
vpc_ strid - instance vpc id.
- user_
name str - MongoDB database user name.
- cluster String
- ClickHouse cluster.
- database String
- SQLServer database name.
- drop
Cls List<Property Map> - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- drop
Invalid BooleanMessage - Whether to discard messages that fail to parse, the default is true.
- ip String
- Mongo DB connection ip.
- password String
- MongoDB database password.
- port Number
- MongoDB connection port.
- resource String
- Resource.
- schemas List<Property Map>
- ClickHouse schema.
- self
Built Boolean - Whether it is a self-built cluster.
- service
Vip String - instance vip.
- table String
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- type String
- Resource Type.
- uniq
Vpc StringId - instance vpc id.
- user
Name String - MongoDB database user name.
GetCkafkaDatahubTaskTaskListSourceResourceClickHouseParamDropCl
- DropClsLogSet string - cls LogSet id.
- DropClsOwneruin string - account.
- DropClsRegion string - The region where the cls is delivered.
- DropClsTopicId string - cls topic.
- DropInvalidMessageToCls bool - Whether to deliver to cls.
- DropClsLogSet string - cls LogSet id.
- DropClsOwneruin string - account.
- DropClsRegion string - The region where the cls is delivered.
- DropClsTopicId string - cls topic.
- DropInvalidMessageToCls bool - Whether to deliver to cls.
- dropClsLogSet String - cls LogSet id.
- dropClsOwneruin String - account.
- dropClsRegion String - The region where the cls is delivered.
- dropClsTopicId String - cls topic.
- dropInvalidMessageToCls Boolean - Whether to deliver to cls.
- dropClsLogSet string - cls LogSet id.
- dropClsOwneruin string - account.
- dropClsRegion string - The region where the cls is delivered.
- dropClsTopicId string - cls topic.
- dropInvalidMessageToCls boolean - Whether to deliver to cls.
- drop_cls_log_set str - cls LogSet id.
- drop_cls_owneruin str - account.
- drop_cls_region str - The region where the cls is delivered.
- drop_cls_topic_id str - cls topic.
- drop_invalid_message_to_cls bool - Whether to deliver to cls.
- dropClsLogSet String - cls LogSet id.
- dropClsOwneruin String - account.
- dropClsRegion String - The region where the cls is delivered.
- dropClsTopicId String - cls topic.
- dropInvalidMessageToCls Boolean - Whether to deliver to cls.
GetCkafkaDatahubTaskTaskListSourceResourceClickHouseParamSchema
- AllowNull bool - Whether the message is allowed to be empty.
- ColumnName string - Column Name.
- JsonKey string - The key name of the message.
- Type string - Resource Type.
- AllowNull bool - Whether the message is allowed to be empty.
- ColumnName string - Column Name.
- JsonKey string - The key name of the message.
- Type string - Resource Type.
- allowNull Boolean - Whether the message is allowed to be empty.
- columnName String - Column Name.
- jsonKey String - The key name of the message.
- type String - Resource Type.
- allowNull boolean - Whether the message is allowed to be empty.
- columnName string - Column Name.
- jsonKey string - The key name of the message.
- type string - Resource Type.
- allow_null bool - Whether the message is allowed to be empty.
- column_name str - Column Name.
- json_key str - The key name of the message.
- type str - Resource Type.
- allowNull Boolean - Whether the message is allowed to be empty.
- columnName String - Column Name.
- jsonKey String - The key name of the message.
- type String - Resource Type.
GetCkafkaDatahubTaskTaskListSourceResourceClsParam
- ContentKey string - key for data in non-json format.
- DecodeJson bool - Whether the produced information is in json format.
- LogSet string - LogSet id.
- Resource string - Resource.
- TimeField string - Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.
- ContentKey string - key for data in non-json format.
- DecodeJson bool - Whether the produced information is in json format.
- LogSet string - LogSet id.
- Resource string - Resource.
- TimeField string - Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.
- contentKey String - key for data in non-json format.
- decodeJson Boolean - Whether the produced information is in json format.
- logSet String - LogSet id.
- resource String - Resource.
- timeField String - Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.
- contentKey string - key for data in non-json format.
- decodeJson boolean - Whether the produced information is in json format.
- logSet string - LogSet id.
- resource string - Resource.
- timeField string - Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.
- content_key str - key for data in non-json format.
- decode_json bool - Whether the produced information is in json format.
- log_set str - LogSet id.
- resource str - Resource.
- time_field str - Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.
- contentKey String - key for data in non-json format.
- decodeJson Boolean - Whether the produced information is in json format.
- logSet String - LogSet id.
- resource String - Resource.
- timeField String - Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.
GetCkafkaDatahubTaskTaskListSourceResourceCosParam
- AggregateBatchSize double - The size of aggregated messages MB.
- AggregateInterval double - time interval.
- BucketName string - cos bucket name.
- DirectoryTimeFormat string - Partition format formatted according to strptime time.
- FormatOutputType string - The file format after message aggregation csv|json.
- ObjectKey string - ObjectKey.
- ObjectKeyPrefix string - Dumped object directory prefix.
- Region string - region code.
- AggregateBatchSize float64 - The size of aggregated messages MB.
- AggregateInterval float64 - time interval.
- BucketName string - cos bucket name.
- DirectoryTimeFormat string - Partition format formatted according to strptime time.
- FormatOutputType string - The file format after message aggregation csv|json.
- ObjectKey string - ObjectKey.
- ObjectKeyPrefix string - Dumped object directory prefix.
- Region string - region code.
- aggregateBatchSize Double - The size of aggregated messages MB.
- aggregateInterval Double - time interval.
- bucketName String - cos bucket name.
- directoryTimeFormat String - Partition format formatted according to strptime time.
- formatOutputType String - The file format after message aggregation csv|json.
- objectKey String - ObjectKey.
- objectKeyPrefix String - Dumped object directory prefix.
- region String - region code.
- aggregateBatchSize number - The size of aggregated messages MB.
- aggregateInterval number - time interval.
- bucketName string - cos bucket name.
- directoryTimeFormat string - Partition format formatted according to strptime time.
- formatOutputType string - The file format after message aggregation csv|json.
- objectKey string - ObjectKey.
- objectKeyPrefix string - Dumped object directory prefix.
- region string - region code.
- aggregate_batch_size float - The size of aggregated messages MB.
- aggregate_interval float - time interval.
- bucket_name str - cos bucket name.
- directory_time_format str - Partition format formatted according to strptime time.
- format_output_type str - The file format after message aggregation csv|json.
- object_key str - ObjectKey.
- object_key_prefix str - Dumped object directory prefix.
- region str - region code.
- aggregateBatchSize Number - The size of aggregated messages MB.
- aggregateInterval Number - time interval.
- bucketName String - cos bucket name.
- directoryTimeFormat String - Partition format formatted according to strptime time.
- formatOutputType String - The file format after message aggregation csv|json.
- objectKey String - ObjectKey.
- objectKeyPrefix String - Dumped object directory prefix.
- region String - region code.
GetCkafkaDatahubTaskTaskListSourceResourceCtsdbParam
- CtsdbMetric string - Ctsdb metric.
- Resource string - Resource.
- CtsdbMetric string - Ctsdb metric.
- Resource string - Resource.
- ctsdbMetric String - Ctsdb metric.
- resource String - Resource.
- ctsdbMetric string - Ctsdb metric.
- resource string - Resource.
- ctsdb_metric str - Ctsdb metric.
- resource str - Resource.
- ctsdbMetric String - Ctsdb metric.
- resource String - Resource.
GetCkafkaDatahubTaskTaskListSourceResourceDtsParam
- GroupId string - Dts consumer group Id.
- GroupPassword string - Dts consumer group passwd.
- GroupUser string - Dts account.
- Ip string - Mongo DB connection ip.
- Port double - MongoDB connection port.
- Resource string - Resource.
- Topic string - Topic name, use `,` when more than 1 topic.
- TranSql bool - False to synchronize the original data, true to synchronize the parsed json format data, the default is true.
- GroupId string - Dts consumer group Id.
- GroupPassword string - Dts consumer group passwd.
- GroupUser string - Dts account.
- Ip string - Mongo DB connection ip.
- Port float64 - MongoDB connection port.
- Resource string - Resource.
- Topic string - Topic name, use `,` when more than 1 topic.
- TranSql bool - False to synchronize the original data, true to synchronize the parsed json format data, the default is true.
- groupId String - Dts consumer group Id.
- groupPassword String - Dts consumer group passwd.
- groupUser String - Dts account.
- ip String - Mongo DB connection ip.
- port Double - MongoDB connection port.
- resource String - Resource.
- topic String - Topic name, use `,` when more than 1 topic.
- tranSql Boolean - False to synchronize the original data, true to synchronize the parsed json format data, the default is true.
- groupId string - Dts consumer group Id.
- groupPassword string - Dts consumer group passwd.
- groupUser string - Dts account.
- ip string - Mongo DB connection ip.
- port number - MongoDB connection port.
- resource string - Resource.
- topic string - Topic name, use `,` when more than 1 topic.
- tranSql boolean - False to synchronize the original data, true to synchronize the parsed json format data, the default is true.
- group_id str - Dts consumer group Id.
- group_password str - Dts consumer group passwd.
- group_user str - Dts account.
- ip str - Mongo DB connection ip.
- port float - MongoDB connection port.
- resource str - Resource.
- topic str - Topic name, use `,` when more than 1 topic.
- tran_sql bool - False to synchronize the original data, true to synchronize the parsed json format data, the default is true.
- groupId String - Dts consumer group Id.
- groupPassword String - Dts consumer group passwd.
- groupUser String - Dts account.
- ip String - Mongo DB connection ip.
- port Number - MongoDB connection port.
- resource String - Resource.
- topic String - Topic name, use `,` when more than 1 topic.
- tranSql Boolean - False to synchronize the original data, true to synchronize the parsed json format data, the default is true.
GetCkafkaDatahubTaskTaskListSourceResourceEsParam
- ContentKey string - key for data in non-json format.
- DatabasePrimaryKey string - When the message dumped to ES is the binlog of Database, if you need to synchronize database operations, that is, fill in the primary key of the database table when adding, deleting, and modifying operations to ES.
- DateFormat string - Es date suffix.
- DocumentIdField string - The field name of the document ID value dumped into Es.
- DropCls List<GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropCl> - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- DropDlqs List<GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlq> - dead letter queue.
- DropInvalidJsonMessage bool - Whether Es discards messages in non-json format.
- DropInvalidMessage bool - Whether to discard messages that fail to parse, the default is true.
- Index string - Es index name.
- IndexType string - Es custom index name type, STRING, JSONPATH, the default is STRING.
- Password string - MongoDB database password.
- Port double - MongoDB connection port.
- Resource string - Resource.
- SelfBuilt bool - Whether it is a self-built cluster.
- ServiceVip string - instance vip.
- UniqVpcId string - instance vpc id.
- UserName string - MongoDB database user name.
- ContentKey string - key for data in non-json format.
- DatabasePrimaryKey string - When the message dumped to ES is the binlog of Database, if you need to synchronize database operations, that is, fill in the primary key of the database table when adding, deleting, and modifying operations to ES.
- DateFormat string - Es date suffix.
- DocumentIdField string - The field name of the document ID value dumped into Es.
- DropCls []GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropCl - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- DropDlqs []GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlq - dead letter queue.
- DropInvalidJsonMessage bool - Whether Es discards messages in non-json format.
- DropInvalidMessage bool - Whether to discard messages that fail to parse, the default is true.
- Index string - Es index name.
- IndexType string - Es custom index name type, STRING, JSONPATH, the default is STRING.
- Password string - MongoDB database password.
- Port float64 - MongoDB connection port.
- Resource string - Resource.
- SelfBuilt bool - Whether it is a self-built cluster.
- ServiceVip string - instance vip.
- UniqVpcId string - instance vpc id.
- UserName string - MongoDB database user name.
- contentKey String - key for data in non-json format.
- databasePrimaryKey String - When the message dumped to ES is the binlog of Database, if you need to synchronize database operations, that is, fill in the primary key of the database table when adding, deleting, and modifying operations to ES.
- dateFormat String - Es date suffix.
- documentIdField String - The field name of the document ID value dumped into Es.
- dropCls List<GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropCl> - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- dropDlqs List<GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlq> - dead letter queue.
- dropInvalidJsonMessage Boolean - Whether Es discards messages in non-json format.
- dropInvalidMessage Boolean - Whether to discard messages that fail to parse, the default is true.
- index String - Es index name.
- indexType String - Es custom index name type, STRING, JSONPATH, the default is STRING.
- password String - MongoDB database password.
- port Double - MongoDB connection port.
- resource String - Resource.
- selfBuilt Boolean - Whether it is a self-built cluster.
- serviceVip String - instance vip.
- uniqVpcId String - instance vpc id.
- userName String - MongoDB database user name.
- contentKey string - key for data in non-json format.
- databasePrimaryKey string - When the message dumped to ES is the binlog of Database, if you need to synchronize database operations, that is, fill in the primary key of the database table when adding, deleting, and modifying operations to ES.
- dateFormat string - Es date suffix.
- documentIdField string - The field name of the document ID value dumped into Es.
- dropCls GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropCl[] - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- dropDlqs GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlq[] - dead letter queue.
- dropInvalidJsonMessage boolean - Whether Es discards messages in non-json format.
- dropInvalidMessage boolean - Whether to discard messages that fail to parse, the default is true.
- index string - Es index name.
- indexType string - Es custom index name type, STRING, JSONPATH, the default is STRING.
- password string - MongoDB database password.
- port number - MongoDB connection port.
- resource string - Resource.
- selfBuilt boolean - Whether it is a self-built cluster.
- serviceVip string - instance vip.
- uniqVpcId string - instance vpc id.
- userName string - MongoDB database user name.
- content_key str - key for data in non-json format.
- database_primary_key str - When the message dumped to ES is the binlog of Database, if you need to synchronize database operations, that is, fill in the primary key of the database table when adding, deleting, and modifying operations to ES.
- date_format str - Es date suffix.
- document_id_field str - The field name of the document ID value dumped into Es.
- drop_cls Sequence[GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropCl] - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- drop_dlqs Sequence[GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlq] - dead letter queue.
- drop_invalid_json_message bool - Whether Es discards messages in non-json format.
- drop_invalid_message bool - Whether to discard messages that fail to parse, the default is true.
- index str - Es index name.
- index_type str - Es custom index name type, STRING, JSONPATH, the default is STRING.
- password str - MongoDB database password.
- port float - MongoDB connection port.
- resource str - Resource.
- self_built bool - Whether it is a self-built cluster.
- service_vip str - instance vip.
- uniq_vpc_id str - instance vpc id.
- user_name str - MongoDB database user name.
- contentKey String - key for data in non-json format.
- databasePrimaryKey String - When the message dumped to ES is the binlog of Database, if you need to synchronize database operations, that is, fill in the primary key of the database table when adding, deleting, and modifying operations to ES.
- dateFormat String - Es date suffix.
- documentIdField String - The field name of the document ID value dumped into Es.
- dropCls List<Property Map> - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- dropDlqs List<Property Map> - dead letter queue.
- dropInvalidJsonMessage Boolean - Whether Es discards messages in non-json format.
- dropInvalidMessage Boolean - Whether to discard messages that fail to parse, the default is true.
- index String - Es index name.
- indexType String - Es custom index name type, STRING, JSONPATH, the default is STRING.
- password String - MongoDB database password.
- port Number - MongoDB connection port.
- resource String - Resource.
- selfBuilt Boolean - Whether it is a self-built cluster.
- serviceVip String - instance vip.
- uniqVpcId String - instance vpc id.
- userName String - MongoDB database user name.
GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropCl
- DropClsLogSet string - cls LogSet id.
- DropClsOwneruin string - account.
- DropClsRegion string - The region where the cls is delivered.
- DropClsTopicId string - cls topic.
- DropInvalidMessageToCls bool - Whether to deliver to cls.
- DropClsLogSet string - cls LogSet id.
- DropClsOwneruin string - account.
- DropClsRegion string - The region where the cls is delivered.
- DropClsTopicId string - cls topic.
- DropInvalidMessageToCls bool - Whether to deliver to cls.
- dropClsLogSet String - cls LogSet id.
- dropClsOwneruin String - account.
- dropClsRegion String - The region where the cls is delivered.
- dropClsTopicId String - cls topic.
- dropInvalidMessageToCls Boolean - Whether to deliver to cls.
- dropClsLogSet string - cls LogSet id.
- dropClsOwneruin string - account.
- dropClsRegion string - The region where the cls is delivered.
- dropClsTopicId string - cls topic.
- dropInvalidMessageToCls boolean - Whether to deliver to cls.
- drop_cls_log_set str - cls LogSet id.
- drop_cls_owneruin str - account.
- drop_cls_region str - The region where the cls is delivered.
- drop_cls_topic_id str - cls topic.
- drop_invalid_message_to_cls bool - Whether to deliver to cls.
- dropClsLogSet String - cls LogSet id.
- dropClsOwneruin String - account.
- dropClsRegion String - The region where the cls is delivered.
- dropClsTopicId String - cls topic.
- dropInvalidMessageToCls Boolean - Whether to deliver to cls.
GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlq
- DlqType string - dlq type, CKAFKA|TOPIC.
- KafkaParams List<GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlqKafkaParam> - ckafka configuration, required when Type is KAFKA.
- MaxRetryAttempts double - retry times.
- RetryInterval double - retry interval.
- TopicParams List<GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlqTopicParam> - Topic configuration, Required when Type is Topic.
- Type string - Resource Type.
- DlqType string - dlq type, CKAFKA|TOPIC.
- KafkaParams []GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlqKafkaParam - ckafka configuration, required when Type is KAFKA.
- MaxRetryAttempts float64 - retry times.
- RetryInterval float64 - retry interval.
- TopicParams []GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlqTopicParam - Topic configuration, Required when Type is Topic.
- Type string - Resource Type.
- dlqType String - dlq type, CKAFKA|TOPIC.
- kafkaParams List<GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlqKafkaParam> - ckafka configuration, required when Type is KAFKA.
- maxRetryAttempts Double - retry times.
- retryInterval Double - retry interval.
- topicParams List<GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlqTopicParam> - Topic configuration, Required when Type is Topic.
- type String - Resource Type.
- dlqType string - dlq type, CKAFKA|TOPIC.
- kafkaParams GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlqKafkaParam[] - ckafka configuration, required when Type is KAFKA.
- maxRetryAttempts number - retry times.
- retryInterval number - retry interval.
- topicParams GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlqTopicParam[] - Topic configuration, Required when Type is Topic.
- type string - Resource Type.
- dlq_type str - dlq type, CKAFKA|TOPIC.
- kafka_params Sequence[GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlqKafkaParam] - ckafka configuration, required when Type is KAFKA.
- max_retry_attempts float - retry times.
- retry_interval float - retry interval.
- topic_params Sequence[GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlqTopicParam] - Topic configuration, Required when Type is Topic.
- type str - Resource Type.
- dlqType String - dlq type, CKAFKA|TOPIC.
- kafkaParams List<Property Map> - ckafka configuration, required when Type is KAFKA.
- maxRetryAttempts Number - retry times.
- retryInterval Number - retry interval.
- topicParams List<Property Map> - Topic configuration, Required when Type is Topic.
- type String - Resource Type.
GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlqKafkaParam
- CompressionType string - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- ConnectorSyncType string - ConnectorSyncType.
- EnableToleration bool - enable dead letter queue.
- KeepPartition bool - KeepPartition.
- MsgMultiple double - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- OffsetType string - Offset type, initial position earliest, latest position latest, time point position timestamp.
- PartitionNum double - the partition num of the topic.
- QpsLimit double - Qps(query per seconds) limit.
- Resource string - Resource.
- ResourceName string - instance name.
- SelfBuilt bool - Whether it is a self-built cluster.
- StartTime double - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- TableMappings List<GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlqKafkaParamTableMapping> - maps of table to topic, required when multi topic is selected.
- Topic string - Topic name, use `,` when more than 1 topic.
- TopicId string - Topic TopicId.
- UseAutoCreateTopic bool - whether the used topic need to be automatically created (currently only supports SOURCE inflow tasks).
- UseTableMapping bool - whether to use multi table.
- ZoneId double - Zone ID.
- CompressionType string - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- ConnectorSyncType string - ConnectorSyncType.
- EnableToleration bool - enable dead letter queue.
- KeepPartition bool - KeepPartition.
- MsgMultiple float64 - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- OffsetType string - Offset type, initial position earliest, latest position latest, time point position timestamp.
- PartitionNum float64 - the partition num of the topic.
- QpsLimit float64 - Qps(query per seconds) limit.
- Resource string - Resource.
- ResourceName string - instance name.
- SelfBuilt bool - Whether it is a self-built cluster.
- StartTime float64 - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- TableMappings []GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlqKafkaParamTableMapping - maps of table to topic, required when multi topic is selected.
- Topic string - Topic name, use `,` when more than 1 topic.
- TopicId string - Topic TopicId.
- UseAutoCreateTopic bool - whether the used topic need to be automatically created (currently only supports SOURCE inflow tasks).
- UseTableMapping bool - whether to use multi table.
- ZoneId float64 - Zone ID.
- compressionType String - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- connectorSyncType String - ConnectorSyncType.
- enableToleration Boolean - enable dead letter queue.
- keepPartition Boolean - KeepPartition.
- msgMultiple Double - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offsetType String - Offset type, initial position earliest, latest position latest, time point position timestamp.
- partitionNum Double - the partition num of the topic.
- qpsLimit Double - Qps(query per seconds) limit.
- resource String - Resource.
- resourceName String - instance name.
- selfBuilt Boolean - Whether it is a self-built cluster.
- startTime Double - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- tableMappings List<GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlqKafkaParamTableMapping> - maps of table to topic, required when multi topic is selected.
- topic String - Topic name, use `,` when more than 1 topic.
- topicId String - Topic TopicId.
- useAutoCreateTopic Boolean - whether the used topic need to be automatically created (currently only supports SOURCE inflow tasks).
- useTableMapping Boolean - whether to use multi table.
- zoneId Double - Zone ID.
- compressionType string - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- connectorSyncType string - ConnectorSyncType.
- enableToleration boolean - enable dead letter queue.
- keepPartition boolean - KeepPartition.
- msgMultiple number - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offsetType string - Offset type, initial position earliest, latest position latest, time point position timestamp.
- partitionNum number - the partition num of the topic.
- qpsLimit number - Qps(query per seconds) limit.
- resource string - Resource.
- resourceName string - instance name.
- selfBuilt boolean - Whether it is a self-built cluster.
- startTime number - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- tableMappings GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlqKafkaParamTableMapping[] - maps of table to topic, required when multi topic is selected.
- topic string - Topic name, use `,` when more than 1 topic.
- topicId string - Topic TopicId.
- useAutoCreateTopic boolean - whether the used topic need to be automatically created (currently only supports SOURCE inflow tasks).
- useTableMapping boolean - whether to use multi table.
- zoneId number - Zone ID.
- compression_type str - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- connector_sync_type str - ConnectorSyncType.
- enable_toleration bool - enable dead letter queue.
- keep_partition bool - KeepPartition.
- msg_multiple float - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offset_type str - Offset type, initial position earliest, latest position latest, time point position timestamp.
- partition_num float - the partition num of the topic.
- qps_limit float - Qps(query per seconds) limit.
- resource str - Resource.
- resource_name str - instance name.
- self_built bool - Whether it is a self-built cluster.
- start_time float - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- table_mappings Sequence[GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlqKafkaParamTableMapping] - maps of table to topic, required when multi topic is selected.
- topic str - Topic name, use `,` when more than 1 topic.
- topic_id str - Topic TopicId.
- use_auto_create_topic bool - whether the used topic need to be automatically created (currently only supports SOURCE inflow tasks).
- use_table_mapping bool - whether to use multi table.
- zone_id float - Zone ID.
- compressionType String - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- connectorSyncType String - ConnectorSyncType.
- enableToleration Boolean - enable dead letter queue.
- keepPartition Boolean - KeepPartition.
- msgMultiple Number - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offsetType String - Offset type, initial position earliest, latest position latest, time point position timestamp.
- partitionNum Number - the partition num of the topic.
- qpsLimit Number - Qps(query per seconds) limit.
- resource String - Resource.
- resourceName String - instance name.
- selfBuilt Boolean - Whether it is a self-built cluster.
- startTime Number - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- tableMappings List<Property Map> - maps of table to topic, required when multi topic is selected.
- topic String - Topic name, use `,` when more than 1 topic.
- topicId String - Topic TopicId.
- useAutoCreateTopic Boolean - whether the used topic need to be automatically created (currently only supports SOURCE inflow tasks).
- useTableMapping Boolean - whether to use multi table.
- zoneId Number - Zone ID.
GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlqKafkaParamTableMapping
- Database string - SQLServer database name.
- Table string - SQLServer table, is the non-system table in all the monitored databases, you can use `,` to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- Topic string - Topic name, use `,` when more than 1 topic.
- TopicId string - Topic TopicId.
- Database string - SQLServer database name.
- Table string - SQLServer table, is the non-system table in all the monitored databases, you can use `,` to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- Topic string - Topic name, use `,` when more than 1 topic.
- TopicId string - Topic TopicId.
- database String - SQLServer database name.
- table String - SQLServer table, is the non-system table in all the monitored databases, you can use `,` to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- topic String - Topic name, use `,` when more than 1 topic.
- topicId String - Topic TopicId.
- database string - SQLServer database name.
- table string - SQLServer table, is the non-system table in all the monitored databases, you can use `,` to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- topic string - Topic name, use `,` when more than 1 topic.
- topicId string - Topic TopicId.
- database str - SQLServer database name.
- table str - SQLServer table, is the non-system table in all the monitored databases, you can use `,` to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- topic str - Topic name, use `,` when more than 1 topic.
- topic_id str - Topic TopicId.
- database String - SQLServer database name.
- table String - SQLServer table, is the non-system table in all the monitored databases, you can use `,` to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- topic String - Topic name, use `,` when more than 1 topic.
- topicId String - Topic TopicId.
GetCkafkaDatahubTaskTaskListSourceResourceEsParamDropDlqTopicParam
- CompressionType string - Whether to perform compression when writing to the topic; fill in `none` if disabled, or choose one of `gzip`, `snappy`, `lz4` if enabled.
- MsgMultiple double - One source topic message is amplified into MsgMultiple messages and written to the target topic (this parameter currently only applies to CKafka flowing into CKafka).
- OffsetType string - Offset type: initial position `earliest`, latest position `latest`, time point position `timestamp`.
- Resource string - Resource.
- StartTime double - It must be passed when the Offset type is timestamp; the timestamp is passed, accurate to the second.
- TopicId string - Topic TopicId.
- UseAutoCreateTopic bool - Whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- CompressionType string - Whether to perform compression when writing to the topic; fill in `none` if disabled, or choose one of `gzip`, `snappy`, `lz4` if enabled.
- MsgMultiple float64 - One source topic message is amplified into MsgMultiple messages and written to the target topic (this parameter currently only applies to CKafka flowing into CKafka).
- OffsetType string - Offset type: initial position `earliest`, latest position `latest`, time point position `timestamp`.
- Resource string - Resource.
- StartTime float64 - It must be passed when the Offset type is timestamp; the timestamp is passed, accurate to the second.
- TopicId string - Topic TopicId.
- UseAutoCreateTopic bool - Whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- compressionType String - Whether to perform compression when writing to the topic; fill in `none` if disabled, or choose one of `gzip`, `snappy`, `lz4` if enabled.
- msgMultiple Double - One source topic message is amplified into msgMultiple messages and written to the target topic (this parameter currently only applies to CKafka flowing into CKafka).
- offsetType String - Offset type: initial position `earliest`, latest position `latest`, time point position `timestamp`.
- resource String - Resource.
- startTime Double - It must be passed when the Offset type is timestamp; the timestamp is passed, accurate to the second.
- topicId String - Topic TopicId.
- useAutoCreateTopic Boolean - Whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- compressionType string - Whether to perform compression when writing to the topic; fill in `none` if disabled, or choose one of `gzip`, `snappy`, `lz4` if enabled.
- msgMultiple number - One source topic message is amplified into msgMultiple messages and written to the target topic (this parameter currently only applies to CKafka flowing into CKafka).
- offsetType string - Offset type: initial position `earliest`, latest position `latest`, time point position `timestamp`.
- resource string - Resource.
- startTime number - It must be passed when the Offset type is timestamp; the timestamp is passed, accurate to the second.
- topicId string - Topic TopicId.
- useAutoCreateTopic boolean - Whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- compression_type str - Whether to perform compression when writing to the topic; fill in `none` if disabled, or choose one of `gzip`, `snappy`, `lz4` if enabled.
- msg_multiple float - One source topic message is amplified into msg_multiple messages and written to the target topic (this parameter currently only applies to CKafka flowing into CKafka).
- offset_type str - Offset type: initial position `earliest`, latest position `latest`, time point position `timestamp`.
- resource str - Resource.
- start_time float - It must be passed when the Offset type is timestamp; the timestamp is passed, accurate to the second.
- topic_id str - Topic TopicId.
- use_auto_create_topic bool - Whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- compressionType String - Whether to perform compression when writing to the topic; fill in `none` if disabled, or choose one of `gzip`, `snappy`, `lz4` if enabled.
- msgMultiple Number - One source topic message is amplified into msgMultiple messages and written to the target topic (this parameter currently only applies to CKafka flowing into CKafka).
- offsetType String - Offset type: initial position `earliest`, latest position `latest`, time point position `timestamp`.
- resource String - Resource.
- startTime Number - It must be passed when the Offset type is timestamp; the timestamp is passed, accurate to the second.
- topicId String - Topic TopicId.
- useAutoCreateTopic Boolean - Whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
GetCkafkaDatahubTaskTaskListSourceResourceEventBusParam
- function_name str - SCF function name.
- namespace str - SCF cloud function namespace; the default is `default`.
- qualifier str - SCF cloud function version and alias; the default is `DEFAULT`.
- resource str - Resource.
- self_built bool - Whether it is a self-built cluster.
- type str - Resource Type.
GetCkafkaDatahubTaskTaskListSourceResourceKafkaParam
- CompressionType string - Whether to perform compression when writing to the topic; fill in `none` if disabled, or choose one of `gzip`, `snappy`, `lz4` if enabled.
- ConnectorSyncType string - ConnectorSyncType.
- EnableToleration bool - Enable dead letter queue.
- KeepPartition bool - KeepPartition.
- MsgMultiple double - One source topic message is amplified into MsgMultiple messages and written to the target topic (this parameter currently only applies to CKafka flowing into CKafka).
- OffsetType string - Offset type: initial position `earliest`, latest position `latest`, time point position `timestamp`.
- PartitionNum double - The partition number of the topic.
- QpsLimit double - Qps (queries per second) limit.
- Resource string - Resource.
- ResourceName string - Instance name.
- SelfBuilt bool - Whether it is a self-built cluster.
- StartTime double - It must be passed when the Offset type is timestamp; the timestamp is passed, accurate to the second.
- TableMappings List<GetCkafkaDatahubTaskTaskListSourceResourceKafkaParamTableMapping> - Maps of table to topic; required when multi-topic is selected.
- Topic string - Topic name; use `,` (comma) to separate when there is more than one topic.
- TopicId string - Topic TopicId.
- UseAutoCreateTopic bool - Whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- UseTableMapping bool - Whether to use multi-table mapping.
- ZoneId double - Zone ID.
- CompressionType string - Whether to perform compression when writing to the topic; fill in `none` if disabled, or choose one of `gzip`, `snappy`, `lz4` if enabled.
- ConnectorSyncType string - ConnectorSyncType.
- EnableToleration bool - Enable dead letter queue.
- KeepPartition bool - KeepPartition.
- MsgMultiple float64 - One source topic message is amplified into MsgMultiple messages and written to the target topic (this parameter currently only applies to CKafka flowing into CKafka).
- OffsetType string - Offset type: initial position `earliest`, latest position `latest`, time point position `timestamp`.
- PartitionNum float64 - The partition number of the topic.
- QpsLimit float64 - Qps (queries per second) limit.
- Resource string - Resource.
- ResourceName string - Instance name.
- SelfBuilt bool - Whether it is a self-built cluster.
- StartTime float64 - It must be passed when the Offset type is timestamp; the timestamp is passed, accurate to the second.
- TableMappings []GetCkafkaDatahubTaskTaskListSourceResourceKafkaParamTableMapping - Maps of table to topic; required when multi-topic is selected.
- Topic string - Topic name; use `,` (comma) to separate when there is more than one topic.
- TopicId string - Topic TopicId.
- UseAutoCreateTopic bool - Whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- UseTableMapping bool - Whether to use multi-table mapping.
- ZoneId float64 - Zone ID.
- compressionType String - Whether to perform compression when writing to the topic; fill in `none` if disabled, or choose one of `gzip`, `snappy`, `lz4` if enabled.
- connectorSyncType String - ConnectorSyncType.
- enableToleration Boolean - Enable dead letter queue.
- keepPartition Boolean - KeepPartition.
- msgMultiple Double - One source topic message is amplified into msgMultiple messages and written to the target topic (this parameter currently only applies to CKafka flowing into CKafka).
- offsetType String - Offset type: initial position `earliest`, latest position `latest`, time point position `timestamp`.
- partitionNum Double - The partition number of the topic.
- qpsLimit Double - Qps (queries per second) limit.
- resource String - Resource.
- resourceName String - Instance name.
- selfBuilt Boolean - Whether it is a self-built cluster.
- startTime Double - It must be passed when the Offset type is timestamp; the timestamp is passed, accurate to the second.
- tableMappings List<GetCkafkaDatahubTaskTaskListSourceResourceKafkaParamTableMapping> - Maps of table to topic; required when multi-topic is selected.
- topic String - Topic name; use `,` (comma) to separate when there is more than one topic.
- topicId String - Topic TopicId.
- useAutoCreateTopic Boolean - Whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- useTableMapping Boolean - Whether to use multi-table mapping.
- zoneId Double - Zone ID.
- compressionType string - Whether to perform compression when writing to the topic; fill in `none` if disabled, or choose one of `gzip`, `snappy`, `lz4` if enabled.
- connectorSyncType string - ConnectorSyncType.
- enableToleration boolean - Enable dead letter queue.
- keepPartition boolean - KeepPartition.
- msgMultiple number - One source topic message is amplified into msgMultiple messages and written to the target topic (this parameter currently only applies to CKafka flowing into CKafka).
- offsetType string - Offset type: initial position `earliest`, latest position `latest`, time point position `timestamp`.
- partitionNum number - The partition number of the topic.
- qpsLimit number - Qps (queries per second) limit.
- resource string - Resource.
- resourceName string - Instance name.
- selfBuilt boolean - Whether it is a self-built cluster.
- startTime number - It must be passed when the Offset type is timestamp; the timestamp is passed, accurate to the second.
- tableMappings GetCkafkaDatahubTaskTaskListSourceResourceKafkaParamTableMapping[] - Maps of table to topic; required when multi-topic is selected.
- topic string - Topic name; use `,` (comma) to separate when there is more than one topic.
- topicId string - Topic TopicId.
- useAutoCreateTopic boolean - Whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- useTableMapping boolean - Whether to use multi-table mapping.
- zoneId number - Zone ID.
- compression_type str - Whether to perform compression when writing to the topic; fill in `none` if disabled, or choose one of `gzip`, `snappy`, `lz4` if enabled.
- connector_sync_type str - ConnectorSyncType.
- enable_toleration bool - Enable dead letter queue.
- keep_partition bool - KeepPartition.
- msg_multiple float - One source topic message is amplified into msg_multiple messages and written to the target topic (this parameter currently only applies to CKafka flowing into CKafka).
- offset_type str - Offset type: initial position `earliest`, latest position `latest`, time point position `timestamp`.
- partition_num float - The partition number of the topic.
- qps_limit float - Qps (queries per second) limit.
- resource str - Resource.
- resource_name str - Instance name.
- self_built bool - Whether it is a self-built cluster.
- start_time float - It must be passed when the Offset type is timestamp; the timestamp is passed, accurate to the second.
- table_mappings Sequence[GetCkafkaDatahubTaskTaskListSourceResourceKafkaParamTableMapping] - Maps of table to topic; required when multi-topic is selected.
- topic str - Topic name; use `,` (comma) to separate when there is more than one topic.
- topic_id str - Topic TopicId.
- use_auto_create_topic bool - Whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- use_table_mapping bool - Whether to use multi-table mapping.
- zone_id float - Zone ID.
- compressionType String - Whether to perform compression when writing to the topic; fill in `none` if disabled, or choose one of `gzip`, `snappy`, `lz4` if enabled.
- connectorSyncType String - ConnectorSyncType.
- enableToleration Boolean - Enable dead letter queue.
- keepPartition Boolean - KeepPartition.
- msgMultiple Number - One source topic message is amplified into msgMultiple messages and written to the target topic (this parameter currently only applies to CKafka flowing into CKafka).
- offsetType String - Offset type: initial position `earliest`, latest position `latest`, time point position `timestamp`.
- partitionNum Number - The partition number of the topic.
- qpsLimit Number - Qps (queries per second) limit.
- resource String - Resource.
- resourceName String - Instance name.
- selfBuilt Boolean - Whether it is a self-built cluster.
- startTime Number - It must be passed when the Offset type is timestamp; the timestamp is passed, accurate to the second.
- tableMappings List<Property Map> - Maps of table to topic; required when multi-topic is selected.
- topic String - Topic name; use `,` (comma) to separate when there is more than one topic.
- topicId String - Topic TopicId.
- useAutoCreateTopic Boolean - Whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- useTableMapping Boolean - Whether to use multi-table mapping.
- zoneId Number - Zone ID.
GetCkafkaDatahubTaskTaskListSourceResourceKafkaParamTableMapping
- Database string - SQLServer database name.
- Table string - SQLServer table; this is the non-system table in all the monitored databases. You can use `,` (comma) to monitor multiple data tables, but each data table needs to be filled in the format `database name.table name`.
- Topic string - Topic name; use `,` (comma) to separate when there is more than one topic.
- TopicId string - Topic TopicId.
- Database string - SQLServer database name.
- Table string - SQLServer table; this is the non-system table in all the monitored databases. You can use `,` (comma) to monitor multiple data tables, but each data table needs to be filled in the format `database name.table name`.
- Topic string - Topic name; use `,` (comma) to separate when there is more than one topic.
- TopicId string - Topic TopicId.
- database String - SQLServer database name.
- table String - SQLServer table; this is the non-system table in all the monitored databases. You can use `,` (comma) to monitor multiple data tables, but each data table needs to be filled in the format `database name.table name`.
- topic String - Topic name; use `,` (comma) to separate when there is more than one topic.
- topicId String - Topic TopicId.
- database string - SQLServer database name.
- table string - SQLServer table; this is the non-system table in all the monitored databases. You can use `,` (comma) to monitor multiple data tables, but each data table needs to be filled in the format `database name.table name`.
- topic string - Topic name; use `,` (comma) to separate when there is more than one topic.
- topicId string - Topic TopicId.
- database str - SQLServer database name.
- table str - SQLServer table; this is the non-system table in all the monitored databases. You can use `,` (comma) to monitor multiple data tables, but each data table needs to be filled in the format `database name.table name`.
- topic str - Topic name; use `,` (comma) to separate when there is more than one topic.
- topic_id str - Topic TopicId.
- database String - SQLServer database name.
- table String - SQLServer table; this is the non-system table in all the monitored databases. You can use `,` (comma) to monitor multiple data tables, but each data table needs to be filled in the format `database name.table name`.
- topic String - Topic name; use `,` (comma) to separate when there is more than one topic.
- topicId String - Topic TopicId.
GetCkafkaDatahubTaskTaskListSourceResourceMariaDbParam
- Database string - SQLServer database name.
- IncludeContentChanges string - If the value is `all`, DDL data and DML data will also be written to the selected topic; if the value is `dml`, only DML data will be written to the selected topic.
- IncludeQuery bool - If the value is true, and the value of the `binlog_rows_query_log_events` configuration item in MySQL is `ON`, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain the original SQL statement.
- IsTablePrefix bool - When the Table input is a prefix, the value of this item is true; otherwise it is false.
- KeyColumns string - Format: `library1.table1:field1,field2;library2.table2:field2`. Tables are separated by `;` (semicolon) and fields by `,` (comma). Tables that are not specified default to the primary key of the table.
- OutputFormat string - Output format: DEFAULT, CANAL_1, CANAL_2.
- RecordWithSchema bool - If the value is true, the message will carry the schema corresponding to the message structure; if the value is false, it will not.
- Resource string - Resource.
- SnapshotMode string - `schema_only` | `initial`; default `initial`.
- Table string - SQLServer table; this is the non-system table in all the monitored databases. You can use `,` (comma) to monitor multiple data tables, but each data table needs to be filled in the format `database name.table name`.
- Database string - SQLServer database name.
- IncludeContentChanges string - If the value is `all`, DDL data and DML data will also be written to the selected topic; if the value is `dml`, only DML data will be written to the selected topic.
- IncludeQuery bool - If the value is true, and the value of the `binlog_rows_query_log_events` configuration item in MySQL is `ON`, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain the original SQL statement.
- IsTablePrefix bool - When the Table input is a prefix, the value of this item is true; otherwise it is false.
- KeyColumns string - Format: `library1.table1:field1,field2;library2.table2:field2`. Tables are separated by `;` (semicolon) and fields by `,` (comma). Tables that are not specified default to the primary key of the table.
- OutputFormat string - Output format: DEFAULT, CANAL_1, CANAL_2.
- RecordWithSchema bool - If the value is true, the message will carry the schema corresponding to the message structure; if the value is false, it will not.
- Resource string - Resource.
- SnapshotMode string - `schema_only` | `initial`; default `initial`.
- Table string - SQLServer table; this is the non-system table in all the monitored databases. You can use `,` (comma) to monitor multiple data tables, but each data table needs to be filled in the format `database name.table name`.
- database String - SQLServer database name.
- includeContentChanges String - If the value is `all`, DDL data and DML data will also be written to the selected topic; if the value is `dml`, only DML data will be written to the selected topic.
- includeQuery Boolean - If the value is true, and the value of the `binlog_rows_query_log_events` configuration item in MySQL is `ON`, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain the original SQL statement.
- isTablePrefix Boolean - When the Table input is a prefix, the value of this item is true; otherwise it is false.
- keyColumns String - Format: `library1.table1:field1,field2;library2.table2:field2`. Tables are separated by `;` (semicolon) and fields by `,` (comma). Tables that are not specified default to the primary key of the table.
- outputFormat String - Output format: DEFAULT, CANAL_1, CANAL_2.
- recordWithSchema Boolean - If the value is true, the message will carry the schema corresponding to the message structure; if the value is false, it will not.
- resource String - Resource.
- snapshotMode String - `schema_only` | `initial`; default `initial`.
- table String - SQLServer table; this is the non-system table in all the monitored databases. You can use `,` (comma) to monitor multiple data tables, but each data table needs to be filled in the format `database name.table name`.
- database string - SQLServer database name.
- includeContentChanges string - If the value is `all`, DDL data and DML data will also be written to the selected topic; if the value is `dml`, only DML data will be written to the selected topic.
- includeQuery boolean - If the value is true, and the value of the `binlog_rows_query_log_events` configuration item in MySQL is `ON`, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain the original SQL statement.
- isTablePrefix boolean - When the Table input is a prefix, the value of this item is true; otherwise it is false.
- keyColumns string - Format: `library1.table1:field1,field2;library2.table2:field2`. Tables are separated by `;` (semicolon) and fields by `,` (comma). Tables that are not specified default to the primary key of the table.
- outputFormat string - Output format: DEFAULT, CANAL_1, CANAL_2.
- recordWithSchema boolean - If the value is true, the message will carry the schema corresponding to the message structure; if the value is false, it will not.
- resource string - Resource.
- snapshotMode string - `schema_only` | `initial`; default `initial`.
- table string - SQLServer table; this is the non-system table in all the monitored databases. You can use `,` (comma) to monitor multiple data tables, but each data table needs to be filled in the format `database name.table name`.
- database str - SQLServer database name.
- include_content_changes str - If the value is `all`, DDL data and DML data will also be written to the selected topic; if the value is `dml`, only DML data will be written to the selected topic.
- include_query bool - If the value is true, and the value of the `binlog_rows_query_log_events` configuration item in MySQL is `ON`, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain the original SQL statement.
- is_table_prefix bool - When the Table input is a prefix, the value of this item is true; otherwise it is false.
- key_columns str - Format: `library1.table1:field1,field2;library2.table2:field2`. Tables are separated by `;` (semicolon) and fields by `,` (comma). Tables that are not specified default to the primary key of the table.
- output_format str - Output format: DEFAULT, CANAL_1, CANAL_2.
- record_with_schema bool - If the value is true, the message will carry the schema corresponding to the message structure; if the value is false, it will not.
- resource str - Resource.
- snapshot_mode str - `schema_only` | `initial`; default `initial`.
- table str - SQLServer table; this is the non-system table in all the monitored databases. You can use `,` (comma) to monitor multiple data tables, but each data table needs to be filled in the format `database name.table name`.
- database String - SQLServer database name.
- includeContentChanges String - If the value is `all`, DDL data and DML data will also be written to the selected topic; if the value is `dml`, only DML data will be written to the selected topic.
- includeQuery Boolean - If the value is true, and the value of the `binlog_rows_query_log_events` configuration item in MySQL is `ON`, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain the original SQL statement.
- isTablePrefix Boolean - When the Table input is a prefix, the value of this item is true; otherwise it is false.
- keyColumns String - Format: `library1.table1:field1,field2;library2.table2:field2`. Tables are separated by `;` (semicolon) and fields by `,` (comma). Tables that are not specified default to the primary key of the table.
- outputFormat String - Output format: DEFAULT, CANAL_1, CANAL_2.
- recordWithSchema Boolean - If the value is true, the message will carry the schema corresponding to the message structure; if the value is false, it will not.
- resource String - Resource.
- snapshotMode String - `schema_only` | `initial`; default `initial`.
- table String - SQLServer table; this is the non-system table in all the monitored databases. You can use `,` (comma) to monitor multiple data tables, but each data table needs to be filled in the format `database name.table name`.
GetCkafkaDatahubTaskTaskListSourceResourceMongoDbParam
- Collection string - MongoDB collection.
- CopyExisting bool - Whether to copy the stock data; the default is true.
- Database string - SQLServer database name.
- Ip string - MongoDB connection IP.
- ListeningEvent string - Listening event type; if it is empty, it means select all. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename; multiple types are separated by commas.
- Password string - MongoDB database password.
- Pipeline string - Aggregation pipeline.
- Port double - MongoDB connection port.
- ReadPreference string - Master-slave priority; defaults to the master node.
- Resource string - Resource.
- SelfBuilt bool - Whether it is a self-built cluster.
- UserName string - MongoDB database user name.
- Collection string - MongoDB collection.
- CopyExisting bool - Whether to copy the stock data; the default is true.
- Database string - SQLServer database name.
- Ip string - MongoDB connection IP.
- ListeningEvent string - Listening event type; if it is empty, it means select all. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename; multiple types are separated by commas.
- Password string - MongoDB database password.
- Pipeline string - Aggregation pipeline.
- Port float64 - MongoDB connection port.
- ReadPreference string - Master-slave priority; defaults to the master node.
- Resource string - Resource.
- SelfBuilt bool - Whether it is a self-built cluster.
- UserName string - MongoDB database user name.
- collection String - MongoDB collection.
- copyExisting Boolean - Whether to copy the stock data; the default is true.
- database String - SQLServer database name.
- ip String - MongoDB connection IP.
- listeningEvent String - Listening event type; if it is empty, it means select all. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename; multiple types are separated by commas.
- password String - MongoDB database password.
- pipeline String - Aggregation pipeline.
- port Double - MongoDB connection port.
- readPreference String - Master-slave priority; defaults to the master node.
- resource String - Resource.
- selfBuilt Boolean - Whether it is a self-built cluster.
- userName String - MongoDB database user name.
- collection string - MongoDB collection.
- copyExisting boolean - Whether to copy the stock data; the default is true.
- database string - SQLServer database name.
- ip string - MongoDB connection IP.
- listeningEvent string - Listening event type; if it is empty, it means select all. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename; multiple types are separated by commas.
- password string - MongoDB database password.
- pipeline string - Aggregation pipeline.
- port number - MongoDB connection port.
- readPreference string - Master-slave priority; defaults to the master node.
- resource string - Resource.
- selfBuilt boolean - Whether it is a self-built cluster.
- userName string - MongoDB database user name.
- collection str - MongoDB collection.
- copy_existing bool - Whether to copy the stock data; the default is true.
- database str - SQLServer database name.
- ip str - MongoDB connection IP.
- listening_event str - Listening event type; if it is empty, it means select all. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename; multiple types are separated by commas.
- password str - MongoDB database password.
- pipeline str - Aggregation pipeline.
- port float - MongoDB connection port.
- read_preference str - Master-slave priority; defaults to the master node.
- resource str - Resource.
- self_built bool - Whether it is a self-built cluster.
- user_name str - MongoDB database user name.
- collection String - MongoDB collection.
- copyExisting Boolean - Whether to copy the stock data; the default is true.
- database String - SQLServer database name.
- ip String - MongoDB connection IP.
- listeningEvent String - Listening event type; if it is empty, it means select all. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename; multiple types are separated by commas.
- password String - MongoDB database password.
- pipeline String - Aggregation pipeline.
- port Number - MongoDB connection port.
- readPreference String - Master-slave priority; defaults to the master node.
- resource String - Resource.
- selfBuilt Boolean - Whether it is a self-built cluster.
- userName String - MongoDB database user name.
GetCkafkaDatahubTaskTaskListSourceResourceMySqlParam
- DataSourceIncrementColumn string - The name of the column to be monitored.
- DataSourceIncrementMode string - TIMESTAMP indicates that the incremental column is of timestamp type; INCREMENT indicates that the incremental column is of self-incrementing id type.
- DataSourceMonitorMode string - TABLE indicates that the read item is a table; QUERY indicates that the read item is a query.
- DataSourceMonitorResource string - When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query SQL statement that needs to be read.
- DataSourceStartFrom string - HEAD means copy stock + incremental data; TAIL means copy only incremental data.
- DataTargetInsertMode string - INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
- DataTargetPrimaryKeyField string - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- DataTargetRecordMappings List<GetCkafkaDatahubTaskTaskListSourceResourceMySqlParamDataTargetRecordMapping> - Mapping relationship between tables and messages.
- Database string - SQLServer database name.
- DdlTopic string - The Topic that stores the DDL information of MySQL; if it is empty, it will not be stored by default.
- DropCls List<GetCkafkaDatahubTaskTaskListSourceResourceMySqlParamDropCl> - When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is invalid.
- DropInvalidMessage bool - Whether to discard messages that fail to parse; the default is true.
- IncludeContentChanges string - If the value is `all`, DDL data and DML data will also be written to the selected topic; if the value is `dml`, only DML data will be written to the selected topic.
- IncludeQuery bool - If the value is true, and the value of the `binlog_rows_query_log_events` configuration item in MySQL is `ON`, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain the original SQL statement.
- IsTablePrefix bool - When the Table input is a prefix, the value of this item is true; otherwise it is false.
- IsTableRegular bool - Whether the input table is a regular expression.
- KeyColumns string - Format: `library1.table1:field1,field2;library2.table2:field2`. Tables are separated by `;` (semicolon) and fields by `,` (comma). Tables that are not specified default to the primary key of the table.
- OutputFormat string - Output format: DEFAULT, CANAL_1, CANAL_2.
- RecordWithSchema bool - If the value is true, the message will carry the schema corresponding to the message structure; if the value is false, it will not.
- Resource string - Resource.
- SignalDatabase string - Database name of the signal table.
- SnapshotMode string - `schema_only` | `initial`; default `initial`.
- Table string - SQLServer table; this is the non-system table in all the monitored databases. You can use `,` (comma) to monitor multiple data tables, but each data table needs to be filled in the format `database name.table name`.
- TopicRegex string - Regular expression for routing events to specific topics; defaults to `(.*)`.
- TopicReplacement string - TopicRegex, $1, $2.
- Data
Source stringIncrement Column - the name of the column to be monitored.
- Data
Source stringIncrement Mode - TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
- Data
Source stringMonitor Mode - TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
- Data
Source stringMonitor Resource - When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
- Data
Source stringStart From - HEAD means copy stock + incremental data, TAIL means copy only incremental data.
- Data
Target stringInsert Mode - INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- Data
Target stringPrimary Key Field - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- Data
Target []GetRecord Mappings Ckafka Datahub Task Task List Source Resource My Sql Param Data Target Record Mapping - Mapping relationship between tables and messages.
- Database string
- SQLServer database name.
- Ddl
Topic string - The Topic that stores the Ddl information of My SQL, if it is empty, it will not be stored by default.
- Drop
Cls []GetCkafka Datahub Task Task List Source Resource My Sql Param Drop Cl - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- Drop
Invalid boolMessage - Whether to discard messages that fail to parse, the default is true.
- Include
Content stringChanges - If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- Include
Query bool - If the value is true, and the value of the binlog rows query log events configuration item in My SQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain Original SQL statement.
- Is
Table boolPrefix - When the Table input is a prefix, the value of this item is true, otherwise it is false.
- Is
Table boolRegular - Whether the input table is a regular expression.
- Key
Columns string - Format library1.table1:field 1,field2;library2.table2:field2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- Output
Format string - output format, DEFAULT, CANAL_1, CANAL_2.
- Record
With boolSchema - If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- Resource string
- Resource.
- Signal
Database string - database name of signal table.
- Snapshot
Mode string - schema_only|initial default initial.
- Table string
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- Topic
Regex string - Regular expression for routing events to specific topics, defaults to (.*).
- Topic
Replacement string - TopicRegex, $1, $2.
- data
Source StringIncrement Column - the name of the column to be monitored.
- data
Source StringIncrement Mode - TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
- data
Source StringMonitor Mode - TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
- data
Source StringMonitor Resource - When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
- data
Source StringStart From - HEAD means copy stock + incremental data, TAIL means copy only incremental data.
- data
Target StringInsert Mode - INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- data
Target StringPrimary Key Field - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- data
Target List<GetRecord Mappings Ckafka Datahub Task Task List Source Resource My Sql Param Data Target Record Mapping> - Mapping relationship between tables and messages.
- database String
- SQLServer database name.
- ddl
Topic String - The Topic that stores the Ddl information of My SQL, if it is empty, it will not be stored by default.
- drop
Cls List<GetCkafka Datahub Task Task List Source Resource My Sql Param Drop Cl> - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- drop
Invalid BooleanMessage - Whether to discard messages that fail to parse, the default is true.
- include
Content StringChanges - If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- include
Query Boolean - If the value is true, and the value of the binlog rows query log events configuration item in My SQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain Original SQL statement.
- is
Table BooleanPrefix - When the Table input is a prefix, the value of this item is true, otherwise it is false.
- is
Table BooleanRegular - Whether the input table is a regular expression.
- key
Columns String - Format library1.table1:field 1,field2;library2.table2:field2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- output
Format String - output format, DEFAULT, CANAL_1, CANAL_2.
- record
With BooleanSchema - If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- resource String
- Resource.
- signal
Database String - database name of signal table.
- snapshot
Mode String - schema_only|initial default initial.
- table String
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- topic
Regex String - Regular expression for routing events to specific topics, defaults to (.*).
- topic
Replacement String - TopicRegex, $1, $2.
- data
Source stringIncrement Column - the name of the column to be monitored.
- data
Source stringIncrement Mode - TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
- data
Source stringMonitor Mode - TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
- data
Source stringMonitor Resource - When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
- data
Source stringStart From - HEAD means copy stock + incremental data, TAIL means copy only incremental data.
- data
Target stringInsert Mode - INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- data
Target stringPrimary Key Field - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- data
Target GetRecord Mappings Ckafka Datahub Task Task List Source Resource My Sql Param Data Target Record Mapping[] - Mapping relationship between tables and messages.
- database string
- SQLServer database name.
- ddl
Topic string - The Topic that stores the Ddl information of My SQL, if it is empty, it will not be stored by default.
- drop
Cls GetCkafka Datahub Task Task List Source Resource My Sql Param Drop Cl[] - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- drop
Invalid booleanMessage - Whether to discard messages that fail to parse, the default is true.
- include
Content stringChanges - If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- include
Query boolean - If the value is true, and the value of the binlog rows query log events configuration item in My SQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain Original SQL statement.
- is
Table booleanPrefix - When the Table input is a prefix, the value of this item is true, otherwise it is false.
- is
Table booleanRegular - Whether the input table is a regular expression.
- key
Columns string - Format library1.table1:field 1,field2;library2.table2:field2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- output
Format string - output format, DEFAULT, CANAL_1, CANAL_2.
- record
With booleanSchema - If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- resource string
- Resource.
- signal
Database string - database name of signal table.
- snapshot
Mode string - schema_only|initial default initial.
- table string
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- topic
Regex string - Regular expression for routing events to specific topics, defaults to (.*).
- topic
Replacement string - TopicRegex, $1, $2.
- data_
source_ strincrement_ column - the name of the column to be monitored.
- data_
source_ strincrement_ mode - TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
- data_
source_ strmonitor_ mode - TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
- data_
source_ strmonitor_ resource - When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
- data_
source_ strstart_ from - HEAD means copy stock + incremental data, TAIL means copy only incremental data.
- data_
target_ strinsert_ mode - INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- data_
target_ strprimary_ key_ field - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- data_
target_ Sequence[Getrecord_ mappings Ckafka Datahub Task Task List Source Resource My Sql Param Data Target Record Mapping] - Mapping relationship between tables and messages.
- database str
- SQLServer database name.
- ddl_
topic str - The Topic that stores the Ddl information of My SQL, if it is empty, it will not be stored by default.
- drop_
cls Sequence[GetCkafka Datahub Task Task List Source Resource My Sql Param Drop Cl] - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- drop_
invalid_ boolmessage - Whether to discard messages that fail to parse, the default is true.
- include_
content_ strchanges - If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- include_
query bool - If the value is true, and the value of the binlog rows query log events configuration item in My SQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain Original SQL statement.
- is_
table_ boolprefix - When the Table input is a prefix, the value of this item is true, otherwise it is false.
- is_
table_ boolregular - Whether the input table is a regular expression.
- key_
columns str - Format library1.table1:field 1,field2;library2.table2:field2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- output_
format str - output format, DEFAULT, CANAL_1, CANAL_2.
- record_
with_ boolschema - If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- resource str
- Resource.
- signal_
database str - database name of signal table.
- snapshot_
mode str - schema_only|initial default initial.
- table str
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- topic_
regex str - Regular expression for routing events to specific topics, defaults to (.*).
- topic_
replacement str - TopicRegex, $1, $2.
- data
Source StringIncrement Column - the name of the column to be monitored.
- data
Source StringIncrement Mode - TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
- data
Source StringMonitor Mode - TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
- data
Source StringMonitor Resource - When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
- data
Source StringStart From - HEAD means copy stock + incremental data, TAIL means copy only incremental data.
- data
Target StringInsert Mode - INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- data
Target StringPrimary Key Field - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- data
Target List<Property Map>Record Mappings - Mapping relationship between tables and messages.
- database String
- SQLServer database name.
- ddl
Topic String - The Topic that stores the Ddl information of My SQL, if it is empty, it will not be stored by default.
- drop
Cls List<Property Map> - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- drop
Invalid BooleanMessage - Whether to discard messages that fail to parse, the default is true.
- include
Content StringChanges - If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- include
Query Boolean - If the value is true, and the value of the binlog rows query log events configuration item in My SQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain Original SQL statement.
- is
Table BooleanPrefix - When the Table input is a prefix, the value of this item is true, otherwise it is false.
- is
Table BooleanRegular - Whether the input table is a regular expression.
- key
Columns String - Format library1.table1:field 1,field2;library2.table2:field2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- output
Format String - output format, DEFAULT, CANAL_1, CANAL_2.
- record
With BooleanSchema - If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- resource String
- Resource.
- signal
Database String - database name of signal table.
- snapshot
Mode String - schema_only|initial default initial.
- table String
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- topic
Regex String - Regular expression for routing events to specific topics, defaults to (.*).
- topic
Replacement String - TopicRegex, $1, $2.
GetCkafkaDatahubTaskTaskListSourceResourceMySqlParamDataTargetRecordMapping
- AllowNull bool - Whether the message is allowed to be empty.
- AutoIncrement bool - Whether it is an auto-increment column.
- ColumnName string - Column Name.
- ColumnSize string - current ColumnSize.
- DecimalDigits string - current Column DecimalDigits.
- DefaultValue string - Database table default parameters.
- ExtraInfo string - Database table extra fields.
- JsonKey string - The key name of the message.
- Type string - Resource Type.
- AllowNull bool - Whether the message is allowed to be empty.
- AutoIncrement bool - Whether it is an auto-increment column.
- ColumnName string - Column Name.
- ColumnSize string - current ColumnSize.
- DecimalDigits string - current Column DecimalDigits.
- DefaultValue string - Database table default parameters.
- ExtraInfo string - Database table extra fields.
- JsonKey string - The key name of the message.
- Type string - Resource Type.
- allowNull Boolean - Whether the message is allowed to be empty.
- autoIncrement Boolean - Whether it is an auto-increment column.
- columnName String - Column Name.
- columnSize String - current ColumnSize.
- decimalDigits String - current Column DecimalDigits.
- defaultValue String - Database table default parameters.
- extraInfo String - Database table extra fields.
- jsonKey String - The key name of the message.
- type String - Resource Type.
- allowNull boolean - Whether the message is allowed to be empty.
- autoIncrement boolean - Whether it is an auto-increment column.
- columnName string - Column Name.
- columnSize string - current ColumnSize.
- decimalDigits string - current Column DecimalDigits.
- defaultValue string - Database table default parameters.
- extraInfo string - Database table extra fields.
- jsonKey string - The key name of the message.
- type string - Resource Type.
- allow_null bool - Whether the message is allowed to be empty.
- auto_increment bool - Whether it is an auto-increment column.
- column_name str - Column Name.
- column_size str - current ColumnSize.
- decimal_digits str - current Column DecimalDigits.
- default_value str - Database table default parameters.
- extra_info str - Database table extra fields.
- json_key str - The key name of the message.
- type str - Resource Type.
- allowNull Boolean - Whether the message is allowed to be empty.
- autoIncrement Boolean - Whether it is an auto-increment column.
- columnName String - Column Name.
- columnSize String - current ColumnSize.
- decimalDigits String - current Column DecimalDigits.
- defaultValue String - Database table default parameters.
- extraInfo String - Database table extra fields.
- jsonKey String - The key name of the message.
- type String - Resource Type.
GetCkafkaDatahubTaskTaskListSourceResourceMySqlParamDropCl
- DropClsLogSet string - cls LogSet id.
- DropClsOwneruin string - account.
- DropClsRegion string - The region where the cls is delivered.
- DropClsTopicId string - cls topic.
- DropInvalidMessageToCls bool - Whether to deliver to cls.
- DropClsLogSet string - cls LogSet id.
- DropClsOwneruin string - account.
- DropClsRegion string - The region where the cls is delivered.
- DropClsTopicId string - cls topic.
- DropInvalidMessageToCls bool - Whether to deliver to cls.
- dropClsLogSet String - cls LogSet id.
- dropClsOwneruin String - account.
- dropClsRegion String - The region where the cls is delivered.
- dropClsTopicId String - cls topic.
- dropInvalidMessageToCls Boolean - Whether to deliver to cls.
- dropClsLogSet string - cls LogSet id.
- dropClsOwneruin string - account.
- dropClsRegion string - The region where the cls is delivered.
- dropClsTopicId string - cls topic.
- dropInvalidMessageToCls boolean - Whether to deliver to cls.
- drop_cls_log_set str - cls LogSet id.
- drop_cls_owneruin str - account.
- drop_cls_region str - The region where the cls is delivered.
- drop_cls_topic_id str - cls topic.
- drop_invalid_message_to_cls bool - Whether to deliver to cls.
- dropClsLogSet String - cls LogSet id.
- dropClsOwneruin String - account.
- dropClsRegion String - The region where the cls is delivered.
- dropClsTopicId String - cls topic.
- dropInvalidMessageToCls Boolean - Whether to deliver to cls.
GetCkafkaDatahubTaskTaskListSourceResourcePostgreSqlParam
- DataFormat string - Upstream data format (JSON|Debezium), required when the database synchronization mode matches the default field.
- DataTargetInsertMode string - INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- DataTargetPrimaryKeyField string - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- DataTargetRecordMappings List<GetCkafkaDatahubTaskTaskListSourceResourcePostgreSqlParamDataTargetRecordMapping> - Mapping relationship between tables and messages.
- Database string - SQLServer database name.
- DropInvalidMessage bool - Whether to discard messages that fail to parse, the default is true.
- IsTableRegular bool - Whether the input table is a regular expression.
- KeyColumns string - Format library1.table1:field1,field2;library2.table2:field2. Between tables, separated by ; (semicolon); between fields, separated by , (comma). The table that is not specified defaults to the primary key of the table.
- PluginName string - (decoderbufs/pgoutput), default decoderbufs.
- RecordWithSchema bool - If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- Resource string - Resource.
- SnapshotMode string - schema_only|initial default initial.
- Table string - SQLServer table, is the non-system table in all the monitored databases, you can use , to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- DataFormat string - Upstream data format (JSON|Debezium), required when the database synchronization mode matches the default field.
- DataTargetInsertMode string - INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- DataTargetPrimaryKeyField string - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- DataTargetRecordMappings []GetCkafkaDatahubTaskTaskListSourceResourcePostgreSqlParamDataTargetRecordMapping - Mapping relationship between tables and messages.
- Database string - SQLServer database name.
- DropInvalidMessage bool - Whether to discard messages that fail to parse, the default is true.
- IsTableRegular bool - Whether the input table is a regular expression.
- KeyColumns string - Format library1.table1:field1,field2;library2.table2:field2. Between tables, separated by ; (semicolon); between fields, separated by , (comma). The table that is not specified defaults to the primary key of the table.
- PluginName string - (decoderbufs/pgoutput), default decoderbufs.
- RecordWithSchema bool - If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- Resource string - Resource.
- SnapshotMode string - schema_only|initial default initial.
- Table string - SQLServer table, is the non-system table in all the monitored databases, you can use , to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- dataFormat String - Upstream data format (JSON|Debezium), required when the database synchronization mode matches the default field.
- dataTargetInsertMode String - INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- dataTargetPrimaryKeyField String - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- dataTargetRecordMappings List<GetCkafkaDatahubTaskTaskListSourceResourcePostgreSqlParamDataTargetRecordMapping> - Mapping relationship between tables and messages.
- database String - SQLServer database name.
- dropInvalidMessage Boolean - Whether to discard messages that fail to parse, the default is true.
- isTableRegular Boolean - Whether the input table is a regular expression.
- keyColumns String - Format library1.table1:field1,field2;library2.table2:field2. Between tables, separated by ; (semicolon); between fields, separated by , (comma). The table that is not specified defaults to the primary key of the table.
- pluginName String - (decoderbufs/pgoutput), default decoderbufs.
- recordWithSchema Boolean - If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- resource String - Resource.
- snapshotMode String - schema_only|initial default initial.
- table String - SQLServer table, is the non-system table in all the monitored databases, you can use , to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- dataFormat string - Upstream data format (JSON|Debezium), required when the database synchronization mode matches the default field.
- dataTargetInsertMode string - INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- dataTargetPrimaryKeyField string - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- dataTargetRecordMappings GetCkafkaDatahubTaskTaskListSourceResourcePostgreSqlParamDataTargetRecordMapping[] - Mapping relationship between tables and messages.
- database string - SQLServer database name.
- dropInvalidMessage boolean - Whether to discard messages that fail to parse, the default is true.
- isTableRegular boolean - Whether the input table is a regular expression.
- keyColumns string - Format library1.table1:field1,field2;library2.table2:field2. Between tables, separated by ; (semicolon); between fields, separated by , (comma). The table that is not specified defaults to the primary key of the table.
- pluginName string - (decoderbufs/pgoutput), default decoderbufs.
- recordWithSchema boolean - If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- resource string - Resource.
- snapshotMode string - schema_only|initial default initial.
- table string - SQLServer table, is the non-system table in all the monitored databases, you can use , to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- data_format str - Upstream data format (JSON|Debezium), required when the database synchronization mode matches the default field.
- data_target_insert_mode str - INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- data_target_primary_key_field str - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- data_target_record_mappings Sequence[GetCkafkaDatahubTaskTaskListSourceResourcePostgreSqlParamDataTargetRecordMapping] - Mapping relationship between tables and messages.
- database str - SQLServer database name.
- drop_invalid_message bool - Whether to discard messages that fail to parse, the default is true.
- is_table_regular bool - Whether the input table is a regular expression.
- key_columns str - Format library1.table1:field1,field2;library2.table2:field2. Between tables, separated by ; (semicolon); between fields, separated by , (comma). The table that is not specified defaults to the primary key of the table.
- plugin_name str - (decoderbufs/pgoutput), default decoderbufs.
- record_with_schema bool - If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- resource str - Resource.
- snapshot_mode str - schema_only|initial default initial.
- table str - SQLServer table, is the non-system table in all the monitored databases, you can use , to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- dataFormat String - Upstream data format (JSON|Debezium), required when the database synchronization mode matches the default field.
- dataTargetInsertMode String - INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- dataTargetPrimaryKeyField String - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- dataTargetRecordMappings List<Property Map> - Mapping relationship between tables and messages.
- database String - SQLServer database name.
- dropInvalidMessage Boolean - Whether to discard messages that fail to parse, the default is true.
- isTableRegular Boolean - Whether the input table is a regular expression.
- keyColumns String - Format library1.table1:field1,field2;library2.table2:field2. Between tables, separated by ; (semicolon); between fields, separated by , (comma). The table that is not specified defaults to the primary key of the table.
- pluginName String - (decoderbufs/pgoutput), default decoderbufs.
- recordWithSchema Boolean - If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- resource String - Resource.
- snapshotMode String - schema_only|initial default initial.
- table String - SQLServer table, is the non-system table in all the monitored databases, you can use , to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
GetCkafkaDatahubTaskTaskListSourceResourcePostgreSqlParamDataTargetRecordMapping
- AllowNull bool - Whether the message is allowed to be empty.
- AutoIncrement bool - Whether it is an auto-increment column.
- ColumnName string - Column Name.
- ColumnSize string - current ColumnSize.
- DecimalDigits string - current Column DecimalDigits.
- DefaultValue string - Database table default parameters.
- ExtraInfo string - Database table extra fields.
- JsonKey string - The key name of the message.
- Type string - Resource Type.
- AllowNull bool - Whether the message is allowed to be empty.
- AutoIncrement bool - Whether it is an auto-increment column.
- ColumnName string - Column Name.
- ColumnSize string - current ColumnSize.
- DecimalDigits string - current Column DecimalDigits.
- DefaultValue string - Database table default parameters.
- ExtraInfo string - Database table extra fields.
- JsonKey string - The key name of the message.
- Type string - Resource Type.
- allowNull Boolean - Whether the message is allowed to be empty.
- autoIncrement Boolean - Whether it is an auto-increment column.
- columnName String - Column Name.
- columnSize String - current ColumnSize.
- decimalDigits String - current Column DecimalDigits.
- defaultValue String - Database table default parameters.
- extraInfo String - Database table extra fields.
- jsonKey String - The key name of the message.
- type String - Resource Type.
- allowNull boolean - Whether the message is allowed to be empty.
- autoIncrement boolean - Whether it is an auto-increment column.
- columnName string - Column Name.
- columnSize string - current ColumnSize.
- decimalDigits string - current Column DecimalDigits.
- defaultValue string - Database table default parameters.
- extraInfo string - Database table extra fields.
- jsonKey string - The key name of the message.
- type string - Resource Type.
- allow_null bool - Whether the message is allowed to be empty.
- auto_increment bool - Whether it is an auto-increment column.
- column_name str - Column Name.
- column_size str - current ColumnSize.
- decimal_digits str - current Column DecimalDigits.
- default_value str - Database table default parameters.
- extra_info str - Database table extra fields.
- json_key str - The key name of the message.
- type str - Resource Type.
- allow
Null Boolean - Whether the message is allowed to be empty.
- auto
Increment Boolean - Whether it is an auto-increment column.
- column
Name String - Column Name.
- column
Size String - current ColumnSize.
- decimal
Digits String - current Column DecimalDigits.
- default
Value String - Database table default parameters.
- extra
Info String - Database table extra fields.
- json
Key String - The key name of the message.
- type String
- Resource Type.
GetCkafkaDatahubTaskTaskListSourceResourceScfParam
- Batch
Size double - The maximum number of messages sent in each batch, the default is 1000.
- Function
Name string - SCF function name.
- Max
Retries double - The number of retries after the SCF call fails, the default is 5.
- Namespace string
- SCF cloud function namespace, the default is default.
- Qualifier string
- SCF cloud function version and alias, the default is DEFAULT.
- Batch
Size float64 - The maximum number of messages sent in each batch, the default is 1000.
- Function
Name string - SCF function name.
- Max
Retries float64 - The number of retries after the SCF call fails, the default is 5.
- Namespace string
- SCF cloud function namespace, the default is default.
- Qualifier string
- SCF cloud function version and alias, the default is DEFAULT.
- batch
Size Double - The maximum number of messages sent in each batch, the default is 1000.
- function
Name String - SCF function name.
- max
Retries Double - The number of retries after the SCF call fails, the default is 5.
- namespace String
- SCF cloud function namespace, the default is default.
- qualifier String
- SCF cloud function version and alias, the default is DEFAULT.
- batch
Size number - The maximum number of messages sent in each batch, the default is 1000.
- function
Name string - SCF function name.
- max
Retries number - The number of retries after the SCF call fails, the default is 5.
- namespace string
- SCF cloud function namespace, the default is default.
- qualifier string
- SCF cloud function version and alias, the default is DEFAULT.
- batch_
size float - The maximum number of messages sent in each batch, the default is 1000.
- function_
name str - SCF function name.
- max_
retries float - The number of retries after the SCF call fails, the default is 5.
- namespace str
- SCF cloud function namespace, the default is default.
- qualifier str
- SCF cloud function version and alias, the default is DEFAULT.
- batch
Size Number - The maximum number of messages sent in each batch, the default is 1000.
- function
Name String - SCF function name.
- max
Retries Number - The number of retries after the SCF call fails, the default is 5.
- namespace String
- SCF cloud function namespace, the default is default.
- qualifier String
- SCF cloud function version and alias, the default is DEFAULT.
GetCkafkaDatahubTaskTaskListSourceResourceSqlServerParam
- Database string
- SQLServer database name.
- Resource string
- Resource.
- Snapshot
Mode string - schema_only|initial default initial.
- Table string
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- Database string
- SQLServer database name.
- Resource string
- Resource.
- Snapshot
Mode string - schema_only|initial default initial.
- Table string
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- database String
- SQLServer database name.
- resource String
- Resource.
- snapshot
Mode String - schema_only|initial default initial.
- table String
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- database string
- SQLServer database name.
- resource string
- Resource.
- snapshot
Mode string - schema_only|initial default initial.
- table string
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- database str
- SQLServer database name.
- resource str
- Resource.
- snapshot_
mode str - schema_only|initial default initial.
- table str
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- database String
- SQLServer database name.
- resource String
- Resource.
- snapshot
Mode String - schema_only|initial default initial.
- table String
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
GetCkafkaDatahubTaskTaskListSourceResourceTdwParam
- Bid string
- Tdw bid.
- Is
Domestic bool - default true.
- Tdw
Host string - TDW address, default tl-tdbank-tdmanager.tencent-distribute.com.
- Tdw
Port double - TDW port, default 8099.
- Tid string
- Tdw tid.
- Bid string
- Tdw bid.
- Is
Domestic bool - default true.
- Tdw
Host string - TDW address, default tl-tdbank-tdmanager.tencent-distribute.com.
- Tdw
Port float64 - TDW port, default 8099.
- Tid string
- Tdw tid.
- bid String
- Tdw bid.
- is
Domestic Boolean - default true.
- tdw
Host String - TDW address, default tl-tdbank-tdmanager.tencent-distribute.com.
- tdw
Port Double - TDW port, default 8099.
- tid String
- Tdw tid.
- bid string
- Tdw bid.
- is
Domestic boolean - default true.
- tdw
Host string - TDW address, default tl-tdbank-tdmanager.tencent-distribute.com.
- tdw
Port number - TDW port, default 8099.
- tid string
- Tdw tid.
- bid str
- Tdw bid.
- is_
domestic bool - default true.
- tdw_
host str - TDW address, default tl-tdbank-tdmanager.tencent-distribute.com.
- tdw_
port float - TDW port, default 8099.
- tid str
- Tdw tid.
- bid String
- Tdw bid.
- is
Domestic Boolean - default true.
- tdw
Host String - TDW address, default tl-tdbank-tdmanager.tencent-distribute.com.
- tdw
Port Number - TDW port, default 8099.
- tid String
- Tdw tid.
GetCkafkaDatahubTaskTaskListSourceResourceTopicParam
- Compression
Type string - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- Msg
Multiple double - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- Offset
Type string - Offset type, initial position earliest, latest position latest, time point position timestamp.
- Resource string
- Resource.
- Start
Time double - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- Topic
Id string - Topic TopicId.
- Use
Auto boolCreate Topic - whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- Compression
Type string - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- Msg
Multiple float64 - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- Offset
Type string - Offset type, initial position earliest, latest position latest, time point position timestamp.
- Resource string
- Resource.
- Start
Time float64 - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- Topic
Id string - Topic TopicId.
- Use
Auto boolCreate Topic - whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- compression
Type String - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- msg
Multiple Double - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offset
Type String - Offset type, initial position earliest, latest position latest, time point position timestamp.
- resource String
- Resource.
- start
Time Double - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- topic
Id String - Topic TopicId.
- use
Auto BooleanCreate Topic - whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- compression
Type string - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- msg
Multiple number - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offset
Type string - Offset type, initial position earliest, latest position latest, time point position timestamp.
- resource string
- Resource.
- start
Time number - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- topic
Id string - Topic TopicId.
- use
Auto booleanCreate Topic - whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- compression_
type str - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- msg_
multiple float - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offset_
type str - Offset type, initial position earliest, latest position latest, time point position timestamp.
- resource str
- Resource.
- start_
time float - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- topic_
id str - Topic TopicId.
- use_
auto_ boolcreate_ topic - whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- compression
Type String - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- msg
Multiple Number - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offset
Type String - Offset type, initial position earliest, latest position latest, time point position timestamp.
- resource String
- Resource.
- start
Time Number - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- topic
Id String - Topic TopicId.
- use
Auto BooleanCreate Topic - whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
GetCkafkaDatahubTaskTaskListTargetResource
- Click
House List<GetParams Ckafka Datahub Task Task List Target Resource Click House Param> - ClickHouse config, Type CLICKHOUSE required.
- Cls
Params List<GetCkafka Datahub Task Task List Target Resource Cls Param> - Cls configuration, Required when Type is CLS.
- Cos
Params List<GetCkafka Datahub Task Task List Target Resource Cos Param> - Cos configuration, required when Type is COS.
- Ctsdb
Params List<GetCkafka Datahub Task Task List Target Resource Ctsdb Param> - Ctsdb configuration, Required when Type is CTSDB.
- Dts
Params List<GetCkafka Datahub Task Task List Target Resource Dts Param> - Dts configuration, required when Type is DTS.
- Es
Params List<GetCkafka Datahub Task Task List Target Resource Es Param> - Es configuration, required when Type is ES.
- Event
Bus List<GetParams Ckafka Datahub Task Task List Target Resource Event Bus Param> - EB configuration, required when type is EB.
- Kafka
Params List<GetCkafka Datahub Task Task List Target Resource Kafka Param> - ckafka configuration, required when Type is KAFKA.
- Maria
Db List<GetParams Ckafka Datahub Task Task List Target Resource Maria Db Param> - MariaDB configuration, Required when Type is MARIADB.
- Mongo
Db List<GetParams Ckafka Datahub Task Task List Target Resource Mongo Db Param> - MongoDB config, Required when Type is MONGODB.
- My
Sql List<GetParams Ckafka Datahub Task Task List Target Resource My Sql Param> - MySQL configuration, Required when Type is MYSQL.
- Postgre
Sql List<GetParams Ckafka Datahub Task Task List Target Resource Postgre Sql Param> - PostgreSQL configuration, Required when Type is POSTGRESQL or TDSQL C_POSTGRESQL.
- Scf
Params List<GetCkafka Datahub Task Task List Target Resource Scf Param> - Scf configuration, Required when Type is SCF.
- Sql
Server List<GetParams Ckafka Datahub Task Task List Target Resource Sql Server Param> - SQLServer configuration, Required when Type is SQLSERVER.
- Tdw
Params List<GetCkafka Datahub Task Task List Target Resource Tdw Param> - Tdw configuration, required when Type is TDW.
- Topic
Params List<GetCkafka Datahub Task Task List Target Resource Topic Param> - Topic configuration, Required when Type is Topic.
- Type string
- Resource Type.
- Click
House []GetParams Ckafka Datahub Task Task List Target Resource Click House Param - ClickHouse config, Type CLICKHOUSE required.
- Cls
Params []GetCkafka Datahub Task Task List Target Resource Cls Param - Cls configuration, Required when Type is CLS.
- Cos
Params []GetCkafka Datahub Task Task List Target Resource Cos Param - Cos configuration, required when Type is COS.
- Ctsdb
Params []GetCkafka Datahub Task Task List Target Resource Ctsdb Param - Ctsdb configuration, Required when Type is CTSDB.
- Dts
Params []GetCkafka Datahub Task Task List Target Resource Dts Param - Dts configuration, required when Type is DTS.
- Es
Params []GetCkafka Datahub Task Task List Target Resource Es Param - Es configuration, required when Type is ES.
- Event
Bus []GetParams Ckafka Datahub Task Task List Target Resource Event Bus Param - EB configuration, required when type is EB.
- Kafka
Params []GetCkafka Datahub Task Task List Target Resource Kafka Param - ckafka configuration, required when Type is KAFKA.
- Maria
Db []GetParams Ckafka Datahub Task Task List Target Resource Maria Db Param - MariaDB configuration, Required when Type is MARIADB.
- Mongo
Db []GetParams Ckafka Datahub Task Task List Target Resource Mongo Db Param - MongoDB config, Required when Type is MONGODB.
- My
Sql []GetParams Ckafka Datahub Task Task List Target Resource My Sql Param - MySQL configuration, Required when Type is MYSQL.
- Postgre
Sql []GetParams Ckafka Datahub Task Task List Target Resource Postgre Sql Param - PostgreSQL configuration, Required when Type is POSTGRESQL or TDSQL C_POSTGRESQL.
- Scf
Params []GetCkafka Datahub Task Task List Target Resource Scf Param - Scf configuration, Required when Type is SCF.
- Sql
Server []GetParams Ckafka Datahub Task Task List Target Resource Sql Server Param - SQLServer configuration, Required when Type is SQLSERVER.
- Tdw
Params []GetCkafka Datahub Task Task List Target Resource Tdw Param - Tdw configuration, required when Type is TDW.
- Topic
Params []GetCkafka Datahub Task Task List Target Resource Topic Param - Topic configuration, Required when Type is Topic.
- Type string
- Resource Type.
- click
House List<GetParams Ckafka Datahub Task Task List Target Resource Click House Param> - ClickHouse config, Type CLICKHOUSE required.
- cls
Params List<GetCkafka Datahub Task Task List Target Resource Cls Param> - Cls configuration, Required when Type is CLS.
- cos
Params List<GetCkafka Datahub Task Task List Target Resource Cos Param> - Cos configuration, required when Type is COS.
- ctsdb
Params List<GetCkafka Datahub Task Task List Target Resource Ctsdb Param> - Ctsdb configuration, Required when Type is CTSDB.
- dts
Params List<GetCkafka Datahub Task Task List Target Resource Dts Param> - Dts configuration, required when Type is DTS.
- es
Params List<GetCkafka Datahub Task Task List Target Resource Es Param> - Es configuration, required when Type is ES.
- event
Bus List<GetParams Ckafka Datahub Task Task List Target Resource Event Bus Param> - EB configuration, required when type is EB.
- kafka
Params List<GetCkafka Datahub Task Task List Target Resource Kafka Param> - ckafka configuration, required when Type is KAFKA.
- maria
Db List<GetParams Ckafka Datahub Task Task List Target Resource Maria Db Param> - MariaDB configuration, Required when Type is MARIADB.
- mongo
Db List<GetParams Ckafka Datahub Task Task List Target Resource Mongo Db Param> - MongoDB config, Required when Type is MONGODB.
- my
Sql List<GetParams Ckafka Datahub Task Task List Target Resource My Sql Param> - MySQL configuration, Required when Type is MYSQL.
- postgre
Sql List<GetParams Ckafka Datahub Task Task List Target Resource Postgre Sql Param> - PostgreSQL configuration, Required when Type is POSTGRESQL or TDSQL C_POSTGRESQL.
- scf
Params List<GetCkafka Datahub Task Task List Target Resource Scf Param> - Scf configuration, Required when Type is SCF.
- sql
Server List<GetParams Ckafka Datahub Task Task List Target Resource Sql Server Param> - SQLServer configuration, Required when Type is SQLSERVER.
- tdw
Params List<GetCkafka Datahub Task Task List Target Resource Tdw Param> - Tdw configuration, required when Type is TDW.
- topic
Params List<GetCkafka Datahub Task Task List Target Resource Topic Param> - Topic configuration, Required when Type is Topic.
- type String
- Resource Type.
- click
House GetParams Ckafka Datahub Task Task List Target Resource Click House Param[] - ClickHouse config, Type CLICKHOUSE required.
- cls
Params GetCkafka Datahub Task Task List Target Resource Cls Param[] - Cls configuration, Required when Type is CLS.
- cos
Params GetCkafka Datahub Task Task List Target Resource Cos Param[] - Cos configuration, required when Type is COS.
- ctsdb
Params GetCkafka Datahub Task Task List Target Resource Ctsdb Param[] - Ctsdb configuration, Required when Type is CTSDB.
- dts
Params GetCkafka Datahub Task Task List Target Resource Dts Param[] - Dts configuration, required when Type is DTS.
- es
Params GetCkafka Datahub Task Task List Target Resource Es Param[] - Es configuration, required when Type is ES.
- event
Bus GetParams Ckafka Datahub Task Task List Target Resource Event Bus Param[] - EB configuration, required when type is EB.
- kafka
Params GetCkafka Datahub Task Task List Target Resource Kafka Param[] - ckafka configuration, required when Type is KAFKA.
- maria
Db GetParams Ckafka Datahub Task Task List Target Resource Maria Db Param[] - MariaDB configuration, Required when Type is MARIADB.
- mongo
Db GetParams Ckafka Datahub Task Task List Target Resource Mongo Db Param[] - MongoDB config, Required when Type is MONGODB.
- my
Sql GetParams Ckafka Datahub Task Task List Target Resource My Sql Param[] - MySQL configuration, Required when Type is MYSQL.
- postgre
Sql GetParams Ckafka Datahub Task Task List Target Resource Postgre Sql Param[] - PostgreSQL configuration, Required when Type is POSTGRESQL or TDSQL C_POSTGRESQL.
- scf
Params GetCkafka Datahub Task Task List Target Resource Scf Param[] - Scf configuration, Required when Type is SCF.
- sql
Server GetParams Ckafka Datahub Task Task List Target Resource Sql Server Param[] - SQLServer configuration, Required when Type is SQLSERVER.
- tdw
Params GetCkafka Datahub Task Task List Target Resource Tdw Param[] - Tdw configuration, required when Type is TDW.
- topic
Params GetCkafka Datahub Task Task List Target Resource Topic Param[] - Topic configuration, Required when Type is Topic.
- type string
- Resource Type.
- click_
house_ Sequence[Getparams Ckafka Datahub Task Task List Target Resource Click House Param] - ClickHouse config, Type CLICKHOUSE required.
- cls_
params Sequence[GetCkafka Datahub Task Task List Target Resource Cls Param] - Cls configuration, Required when Type is CLS.
- cos_
params Sequence[GetCkafka Datahub Task Task List Target Resource Cos Param] - Cos configuration, required when Type is COS.
- ctsdb_
params Sequence[GetCkafka Datahub Task Task List Target Resource Ctsdb Param] - Ctsdb configuration, Required when Type is CTSDB.
- dts_
params Sequence[GetCkafka Datahub Task Task List Target Resource Dts Param] - Dts configuration, required when Type is DTS.
- es_
params Sequence[GetCkafka Datahub Task Task List Target Resource Es Param] - Es configuration, required when Type is ES.
- event_
bus_ Sequence[Getparams Ckafka Datahub Task Task List Target Resource Event Bus Param] - EB configuration, required when type is EB.
- kafka_
params Sequence[GetCkafka Datahub Task Task List Target Resource Kafka Param] - ckafka configuration, required when Type is KAFKA.
- maria_
db_ Sequence[Getparams Ckafka Datahub Task Task List Target Resource Maria Db Param] - MariaDB configuration, Required when Type is MARIADB.
- mongo_
db_ Sequence[Getparams Ckafka Datahub Task Task List Target Resource Mongo Db Param] - MongoDB config, Required when Type is MONGODB.
- my_
sql_ Sequence[Getparams Ckafka Datahub Task Task List Target Resource My Sql Param] - MySQL configuration, Required when Type is MYSQL.
- postgre_
sql_ Sequence[Getparams Ckafka Datahub Task Task List Target Resource Postgre Sql Param] - PostgreSQL configuration, Required when Type is POSTGRESQL or TDSQL C_POSTGRESQL.
- scf_
params Sequence[GetCkafka Datahub Task Task List Target Resource Scf Param] - Scf configuration, Required when Type is SCF.
- sql_
server_ Sequence[Getparams Ckafka Datahub Task Task List Target Resource Sql Server Param] - SQLServer configuration, Required when Type is SQLSERVER.
- tdw_
params Sequence[GetCkafka Datahub Task Task List Target Resource Tdw Param] - Tdw configuration, required when Type is TDW.
- topic_
params Sequence[GetCkafka Datahub Task Task List Target Resource Topic Param] - Topic configuration, Required when Type is Topic.
- type str
- Resource Type.
- click
House List<Property Map>Params - ClickHouse config, Type CLICKHOUSE required.
- cls
Params List<Property Map> - Cls configuration, Required when Type is CLS.
- cos
Params List<Property Map> - Cos configuration, required when Type is COS.
- ctsdb
Params List<Property Map> - Ctsdb configuration, Required when Type is CTSDB.
- dts
Params List<Property Map> - Dts configuration, required when Type is DTS.
- es
Params List<Property Map> - Es configuration, required when Type is ES.
- event
Bus List<Property Map>Params - EB configuration, required when type is EB.
- kafka
Params List<Property Map> - ckafka configuration, required when Type is KAFKA.
- maria
Db List<Property Map>Params - MariaDB configuration, Required when Type is MARIADB.
- mongo
Db List<Property Map>Params - MongoDB config, Required when Type is MONGODB.
- my
Sql List<Property Map>Params - MySQL configuration, Required when Type is MYSQL.
- postgre
Sql List<Property Map>Params - PostgreSQL configuration, Required when Type is POSTGRESQL or TDSQL C_POSTGRESQL.
- scf
Params List<Property Map> - Scf configuration, Required when Type is SCF.
- sql
Server List<Property Map>Params - SQLServer configuration, Required when Type is SQLSERVER.
- tdw
Params List<Property Map> - Tdw configuration, required when Type is TDW.
- topic
Params List<Property Map> - Topic configuration, Required when Type is Topic.
- type String
- Resource Type.
GetCkafkaDatahubTaskTaskListTargetResourceClickHouseParam
- Cluster string
- ClickHouse cluster.
- Database string
- SQLServer database name.
- Drop
Cls List<GetCkafka Datahub Task Task List Target Resource Click House Param Drop Cl> - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- Drop
Invalid boolMessage - Whether to discard messages that fail to parse, the default is true.
- Ip string
- Mongo DB connection ip.
- Password string
- MongoDB database password.
- Port double
- MongoDB connection port.
- Resource string
- Resource.
- Schemas
List<Get
Ckafka Datahub Task Task List Target Resource Click House Param Schema> - ClickHouse schema.
- Self
Built bool - Whether it is a self-built cluster.
- Service
Vip string - instance vip.
- Table string
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- Type string
- Resource Type.
- Uniq
Vpc stringId - instance vpc id.
- User
Name string - MongoDB database user name.
- Cluster string
- ClickHouse cluster.
- Database string
- SQLServer database name.
- Drop
Cls []GetCkafka Datahub Task Task List Target Resource Click House Param Drop Cl - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- Drop
Invalid boolMessage - Whether to discard messages that fail to parse, the default is true.
- Ip string
- Mongo DB connection ip.
- Password string
- MongoDB database password.
- Port float64
- MongoDB connection port.
- Resource string
- Resource.
- Schemas
[]Get
Ckafka Datahub Task Task List Target Resource Click House Param Schema - ClickHouse schema.
- Self
Built bool - Whether it is a self-built cluster.
- Service
Vip string - instance vip.
- Table string
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- Type string
- Resource Type.
- Uniq
Vpc stringId - instance vpc id.
- User
Name string - MongoDB database user name.
- cluster String
- ClickHouse cluster.
- database String
- SQLServer database name.
- drop
Cls List<GetCkafka Datahub Task Task List Target Resource Click House Param Drop Cl> - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- drop
Invalid BooleanMessage - Whether to discard messages that fail to parse, the default is true.
- ip String
- Mongo DB connection ip.
- password String
- MongoDB database password.
- port Double
- MongoDB connection port.
- resource String
- Resource.
- schemas
List<Get
Ckafka Datahub Task Task List Target Resource Click House Param Schema> - ClickHouse schema.
- self
Built Boolean - Whether it is a self-built cluster.
- service
Vip String - instance vip.
- table String
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- type String
- Resource Type.
- uniq
Vpc StringId - instance vpc id.
- user
Name String - MongoDB database user name.
- cluster string
- ClickHouse cluster.
- database string
- SQLServer database name.
- drop
Cls GetCkafka Datahub Task Task List Target Resource Click House Param Drop Cl[] - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- drop
Invalid booleanMessage - Whether to discard messages that fail to parse, the default is true.
- ip string
- Mongo DB connection ip.
- password string
- MongoDB database password.
- port number
- MongoDB connection port.
- resource string
- Resource.
- schemas
Get
Ckafka Datahub Task Task List Target Resource Click House Param Schema[] - ClickHouse schema.
- self
Built boolean - Whether it is a self-built cluster.
- service
Vip string - instance vip.
- table string
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- type string
- Resource Type.
- uniq
Vpc stringId - instance vpc id.
- user
Name string - MongoDB database user name.
- cluster str
- ClickHouse cluster.
- database str
- SQLServer database name.
- drop_
cls Sequence[GetCkafka Datahub Task Task List Target Resource Click House Param Drop Cl] - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- drop_
invalid_ boolmessage - Whether to discard messages that fail to parse, the default is true.
- ip str
- Mongo DB connection ip.
- password str
- MongoDB database password.
- port float
- MongoDB connection port.
- resource str
- Resource.
- schemas
Sequence[Get
Ckafka Datahub Task Task List Target Resource Click House Param Schema] - ClickHouse schema.
- self_
built bool - Whether it is a self-built cluster.
- service_
vip str - instance vip.
- table str
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- type str
- Resource Type.
- uniq_
vpc_ strid - instance vpc id.
- user_
name str - MongoDB database user name.
- cluster String
- ClickHouse cluster.
- database String
- SQLServer database name.
- drop
Cls List<Property Map> - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- drop
Invalid BooleanMessage - Whether to discard messages that fail to parse, the default is true.
- ip String
- Mongo DB connection ip.
- password String
- MongoDB database password.
- port Number
- MongoDB connection port.
- resource String
- Resource.
- schemas List<Property Map>
- ClickHouse schema.
- self
Built Boolean - Whether it is a self-built cluster.
- service
Vip String - instance vip.
- table String
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- type String
- Resource Type.
- uniqVpcId String - instance vpc id.
- userName String - MongoDB database user name.
GetCkafkaDatahubTaskTaskListTargetResourceClickHouseParamDropCl
- DropClsLogSet string - cls LogSet id.
- DropClsOwneruin string - account.
- DropClsRegion string - The region where the cls is delivered.
- DropClsTopicId string - cls topic.
- DropInvalidMessageToCls bool - Whether to deliver to cls.
- Drop
Cls stringLog Set - cls LogSet id.
- Drop
Cls stringOwneruin - account.
- Drop
Cls stringRegion - The region where the cls is delivered.
- Drop
Cls stringTopic Id - cls topic.
- Drop
Invalid boolMessage To Cls - Whether to deliver to cls.
- drop
Cls StringLog Set - cls LogSet id.
- drop
Cls StringOwneruin - account.
- drop
Cls StringRegion - The region where the cls is delivered.
- drop
Cls StringTopic Id - cls topic.
- drop
Invalid BooleanMessage To Cls - Whether to deliver to cls.
- drop
Cls stringLog Set - cls LogSet id.
- drop
Cls stringOwneruin - account.
- drop
Cls stringRegion - The region where the cls is delivered.
- drop
Cls stringTopic Id - cls topic.
- drop
Invalid booleanMessage To Cls - Whether to deliver to cls.
- drop_cls_log_set str - cls LogSet id.
- drop_cls_owneruin str - account.
- drop_cls_region str - The region where the cls is delivered.
- drop_cls_topic_id str - cls topic.
- drop_invalid_message_to_cls bool - Whether to deliver to cls.
- drop
Cls StringLog Set - cls LogSet id.
- drop
Cls StringOwneruin - account.
- drop
Cls StringRegion - The region where the cls is delivered.
- drop
Cls StringTopic Id - cls topic.
- drop
Invalid BooleanMessage To Cls - Whether to deliver to cls.
GetCkafkaDatahubTaskTaskListTargetResourceClickHouseParamSchema
- Allow
Null bool - Whether the message is allowed to be empty.
- Column
Name string - Column Name.
- Json
Key string - The key name of the message.
- Type string
- Resource Type.
- Allow
Null bool - Whether the message is allowed to be empty.
- Column
Name string - Column Name.
- Json
Key string - The key name of the message.
- Type string
- Resource Type.
- allow
Null Boolean - Whether the message is allowed to be empty.
- column
Name String - Column Name.
- json
Key String - The key name of the message.
- type String
- Resource Type.
- allow
Null boolean - Whether the message is allowed to be empty.
- column
Name string - Column Name.
- json
Key string - The key name of the message.
- type string
- Resource Type.
- allow_
null bool - Whether the message is allowed to be empty.
- column_
name str - Column Name.
- json_
key str - The key name of the message.
- type str
- Resource Type.
- allow
Null Boolean - Whether the message is allowed to be empty.
- column
Name String - Column Name.
- json
Key String - The key name of the message.
- type String
- Resource Type.
GetCkafkaDatahubTaskTaskListTargetResourceClsParam
- Content
Key string - key for data in non-json format.
- Decode
Json bool - Whether the produced information is in json format.
- Log
Set string - LogSet id.
- Resource string
- Resource.
- Time
Field string - Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.
- Content
Key string - key for data in non-json format.
- Decode
Json bool - Whether the produced information is in json format.
- Log
Set string - LogSet id.
- Resource string
- Resource.
- Time
Field string - Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.
- content
Key String - key for data in non-json format.
- decode
Json Boolean - Whether the produced information is in json format.
- log
Set String - LogSet id.
- resource String
- Resource.
- time
Field String - Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.
- content
Key string - key for data in non-json format.
- decode
Json boolean - Whether the produced information is in json format.
- log
Set string - LogSet id.
- resource string
- Resource.
- time
Field string - Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.
- content_
key str - key for data in non-json format.
- decode_
json bool - Whether the produced information is in json format.
- log_
set str - LogSet id.
- resource str
- Resource.
- time_
field str - Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.
- content
Key String - key for data in non-json format.
- decode
Json Boolean - Whether the produced information is in json format.
- log
Set String - LogSet id.
- resource String
- Resource.
- time
Field String - Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.
GetCkafkaDatahubTaskTaskListTargetResourceCosParam
- Aggregate
Batch doubleSize - The size of aggregated messages MB.
- Aggregate
Interval double - time interval.
- Bucket
Name string - cos bucket name.
- Directory
Time stringFormat - Partition format formatted according to strptime time.
- Format
Output stringType - The file format after message aggregation csv|json.
- Object
Key string - ObjectKey.
- Object
Key stringPrefix - Dumped object directory prefix.
- Region string
- region code.
- Aggregate
Batch float64Size - The size of aggregated messages MB.
- Aggregate
Interval float64 - time interval.
- Bucket
Name string - cos bucket name.
- Directory
Time stringFormat - Partition format formatted according to strptime time.
- Format
Output stringType - The file format after message aggregation csv|json.
- Object
Key string - ObjectKey.
- Object
Key stringPrefix - Dumped object directory prefix.
- Region string
- region code.
- aggregate
Batch DoubleSize - The size of aggregated messages MB.
- aggregate
Interval Double - time interval.
- bucket
Name String - cos bucket name.
- directory
Time StringFormat - Partition format formatted according to strptime time.
- format
Output StringType - The file format after message aggregation csv|json.
- object
Key String - ObjectKey.
- object
Key StringPrefix - Dumped object directory prefix.
- region String
- region code.
- aggregate
Batch numberSize - The size of aggregated messages MB.
- aggregate
Interval number - time interval.
- bucket
Name string - cos bucket name.
- directory
Time stringFormat - Partition format formatted according to strptime time.
- format
Output stringType - The file format after message aggregation csv|json.
- object
Key string - ObjectKey.
- object
Key stringPrefix - Dumped object directory prefix.
- region string
- region code.
- aggregate_batch_size float - The size of aggregated messages MB.
- aggregate_interval float - time interval.
- bucket_name str - cos bucket name.
- directory_time_format str - Partition format formatted according to strptime time.
- format_output_type str - The file format after message aggregation csv|json.
- object_key str - ObjectKey.
- object_key_prefix str - Dumped object directory prefix.
- region str
- region code.
- aggregate
Batch NumberSize - The size of aggregated messages MB.
- aggregate
Interval Number - time interval.
- bucket
Name String - cos bucket name.
- directory
Time StringFormat - Partition format formatted according to strptime time.
- format
Output StringType - The file format after message aggregation csv|json.
- object
Key String - ObjectKey.
- object
Key StringPrefix - Dumped object directory prefix.
- region String
- region code.
GetCkafkaDatahubTaskTaskListTargetResourceCtsdbParam
- Ctsdb
Metric string - Ctsdb metric.
- Resource string
- Resource.
- Ctsdb
Metric string - Ctsdb metric.
- Resource string
- Resource.
- ctsdb
Metric String - Ctsdb metric.
- resource String
- Resource.
- ctsdb
Metric string - Ctsdb metric.
- resource string
- Resource.
- ctsdb_metric str - Ctsdb metric.
- resource str
- Resource.
- ctsdb
Metric String - Ctsdb metric.
- resource String
- Resource.
GetCkafkaDatahubTaskTaskListTargetResourceDtsParam
- Group
Id string - Dts consumer group Id.
- Group
Password string - Dts consumer group passwd.
- Group
User string - Dts account.
- Ip string
- Mongo DB connection ip.
- Port double
- MongoDB connection port.
- Resource string
- Resource.
- Topic string - Topic name, use "," when more than 1 topic.
- TranSql bool - False to synchronize the original data, true to synchronize the parsed json format data, the default is true.
- Group
Id string - Dts consumer group Id.
- Group
Password string - Dts consumer group passwd.
- Group
User string - Dts account.
- Ip string
- Mongo DB connection ip.
- Port float64
- MongoDB connection port.
- Resource string
- Resource.
- Topic string
- Topic name, use
,
when more than 1 topic. - Tran
Sql bool - False to synchronize the original data, true to synchronize the parsed json format data, the default is true.
- group
Id String - Dts consumer group Id.
- group
Password String - Dts consumer group passwd.
- group
User String - Dts account.
- ip String
- Mongo DB connection ip.
- port Double
- MongoDB connection port.
- resource String
- Resource.
- topic String
- Topic name, use
,
when more than 1 topic. - tran
Sql Boolean - False to synchronize the original data, true to synchronize the parsed json format data, the default is true.
- group
Id string - Dts consumer group Id.
- group
Password string - Dts consumer group passwd.
- group
User string - Dts account.
- ip string
- Mongo DB connection ip.
- port number
- MongoDB connection port.
- resource string
- Resource.
- topic string
- Topic name, use
,
when more than 1 topic. - tran
Sql boolean - False to synchronize the original data, true to synchronize the parsed json format data, the default is true.
- group_
id str - Dts consumer group Id.
- group_
password str - Dts consumer group passwd.
- group_
user str - Dts account.
- ip str
- Mongo DB connection ip.
- port float
- MongoDB connection port.
- resource str
- Resource.
- topic str
- Topic name, use
,
when more than 1 topic. - tran_
sql bool - False to synchronize the original data, true to synchronize the parsed json format data, the default is true.
- group
Id String - Dts consumer group Id.
- group
Password String - Dts consumer group passwd.
- group
User String - Dts account.
- ip String
- Mongo DB connection ip.
- port Number
- MongoDB connection port.
- resource String
- Resource.
- topic String
- Topic name, use
,
when more than 1 topic. - tran
Sql Boolean - False to synchronize the original data, true to synchronize the parsed json format data, the default is true.
GetCkafkaDatahubTaskTaskListTargetResourceEsParam
- Content
Key string - key for data in non-json format.
- Database
Primary stringKey - When the message dumped to ES is the binlog of Database, if you need to synchronize database operations, that is, fill in the primary key of the database table when adding, deleting, and modifying operations to ES.
- Date
Format string - Es date suffix.
- Document
Id stringField - The field name of the document ID value dumped into Es.
- Drop
Cls List<GetCkafka Datahub Task Task List Target Resource Es Param Drop Cl> - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- Drop
Dlqs List<GetCkafka Datahub Task Task List Target Resource Es Param Drop Dlq> - dead letter queue.
- Drop
Invalid boolJson Message - Whether Es discards messages in non-json format.
- Drop
Invalid boolMessage - Whether to discard messages that fail to parse, the default is true.
- Index string
- Es index name.
- Index
Type string - Es custom index name type, STRING, JSONPATH, the default is STRING.
- Password string
- MongoDB database password.
- Port double
- MongoDB connection port.
- Resource string
- Resource.
- Self
Built bool - Whether it is a self-built cluster.
- Service
Vip string - instance vip.
- Uniq
Vpc stringId - instance vpc id.
- User
Name string - MongoDB database user name.
- Content
Key string - key for data in non-json format.
- Database
Primary stringKey - When the message dumped to ES is the binlog of Database, if you need to synchronize database operations, that is, fill in the primary key of the database table when adding, deleting, and modifying operations to ES.
- Date
Format string - Es date suffix.
- Document
Id stringField - The field name of the document ID value dumped into Es.
- Drop
Cls []GetCkafka Datahub Task Task List Target Resource Es Param Drop Cl - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- Drop
Dlqs []GetCkafka Datahub Task Task List Target Resource Es Param Drop Dlq - dead letter queue.
- Drop
Invalid boolJson Message - Whether Es discards messages in non-json format.
- Drop
Invalid boolMessage - Whether to discard messages that fail to parse, the default is true.
- Index string
- Es index name.
- Index
Type string - Es custom index name type, STRING, JSONPATH, the default is STRING.
- Password string
- MongoDB database password.
- Port float64
- MongoDB connection port.
- Resource string
- Resource.
- Self
Built bool - Whether it is a self-built cluster.
- Service
Vip string - instance vip.
- Uniq
Vpc stringId - instance vpc id.
- User
Name string - MongoDB database user name.
- content
Key String - key for data in non-json format.
- database
Primary StringKey - When the message dumped to ES is the binlog of Database, if you need to synchronize database operations, that is, fill in the primary key of the database table when adding, deleting, and modifying operations to ES.
- date
Format String - Es date suffix.
- document
Id StringField - The field name of the document ID value dumped into Es.
- drop
Cls List<GetCkafka Datahub Task Task List Target Resource Es Param Drop Cl> - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- drop
Dlqs List<GetCkafka Datahub Task Task List Target Resource Es Param Drop Dlq> - dead letter queue.
- drop
Invalid BooleanJson Message - Whether Es discards messages in non-json format.
- drop
Invalid BooleanMessage - Whether to discard messages that fail to parse, the default is true.
- index String
- Es index name.
- index
Type String - Es custom index name type, STRING, JSONPATH, the default is STRING.
- password String
- MongoDB database password.
- port Double
- MongoDB connection port.
- resource String
- Resource.
- self
Built Boolean - Whether it is a self-built cluster.
- service
Vip String - instance vip.
- uniq
Vpc StringId - instance vpc id.
- user
Name String - MongoDB database user name.
- content
Key string - key for data in non-json format.
- database
Primary stringKey - When the message dumped to ES is the binlog of Database, if you need to synchronize database operations, that is, fill in the primary key of the database table when adding, deleting, and modifying operations to ES.
- date
Format string - Es date suffix.
- document
Id stringField - The field name of the document ID value dumped into Es.
- drop
Cls GetCkafka Datahub Task Task List Target Resource Es Param Drop Cl[] - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- drop
Dlqs GetCkafka Datahub Task Task List Target Resource Es Param Drop Dlq[] - dead letter queue.
- drop
Invalid booleanJson Message - Whether Es discards messages in non-json format.
- drop
Invalid booleanMessage - Whether to discard messages that fail to parse, the default is true.
- index string
- Es index name.
- index
Type string - Es custom index name type, STRING, JSONPATH, the default is STRING.
- password string
- MongoDB database password.
- port number
- MongoDB connection port.
- resource string
- Resource.
- self
Built boolean - Whether it is a self-built cluster.
- service
Vip string - instance vip.
- uniq
Vpc stringId - instance vpc id.
- user
Name string - MongoDB database user name.
- content_
key str - key for data in non-json format.
- database_
primary_ strkey - When the message dumped to ES is the binlog of Database, if you need to synchronize database operations, that is, fill in the primary key of the database table when adding, deleting, and modifying operations to ES.
- date_
format str - Es date suffix.
- document_
id_ strfield - The field name of the document ID value dumped into Es.
- drop_
cls Sequence[GetCkafka Datahub Task Task List Target Resource Es Param Drop Cl] - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- drop_
dlqs Sequence[GetCkafka Datahub Task Task List Target Resource Es Param Drop Dlq] - dead letter queue.
- drop_
invalid_ booljson_ message - Whether Es discards messages in non-json format.
- drop_
invalid_ boolmessage - Whether to discard messages that fail to parse, the default is true.
- index str
- Es index name.
- index_
type str - Es custom index name type, STRING, JSONPATH, the default is STRING.
- password str
- MongoDB database password.
- port float
- MongoDB connection port.
- resource str
- Resource.
- self_
built bool - Whether it is a self-built cluster.
- service_
vip str - instance vip.
- uniq_
vpc_ strid - instance vpc id.
- user_
name str - MongoDB database user name.
- content
Key String - key for data in non-json format.
- database
Primary StringKey - When the message dumped to ES is the binlog of Database, if you need to synchronize database operations, that is, fill in the primary key of the database table when adding, deleting, and modifying operations to ES.
- date
Format String - Es date suffix.
- document
Id StringField - The field name of the document ID value dumped into Es.
- drop
Cls List<Property Map> - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- drop
Dlqs List<Property Map> - dead letter queue.
- drop
Invalid BooleanJson Message - Whether Es discards messages in non-json format.
- drop
Invalid BooleanMessage - Whether to discard messages that fail to parse, the default is true.
- index String
- Es index name.
- index
Type String - Es custom index name type, STRING, JSONPATH, the default is STRING.
- password String
- MongoDB database password.
- port Number
- MongoDB connection port.
- resource String
- Resource.
- self
Built Boolean - Whether it is a self-built cluster.
- service
Vip String - instance vip.
- uniq
Vpc StringId - instance vpc id.
- user
Name String - MongoDB database user name.
GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropCl
- Drop
Cls stringLog Set - cls LogSet id.
- Drop
Cls stringOwneruin - account.
- Drop
Cls stringRegion - The region where the cls is delivered.
- Drop
Cls stringTopic Id - cls topic.
- Drop
Invalid boolMessage To Cls - Whether to deliver to cls.
- Drop
Cls stringLog Set - cls LogSet id.
- Drop
Cls stringOwneruin - account.
- Drop
Cls stringRegion - The region where the cls is delivered.
- Drop
Cls stringTopic Id - cls topic.
- Drop
Invalid boolMessage To Cls - Whether to deliver to cls.
- drop
Cls StringLog Set - cls LogSet id.
- drop
Cls StringOwneruin - account.
- drop
Cls StringRegion - The region where the cls is delivered.
- drop
Cls StringTopic Id - cls topic.
- drop
Invalid BooleanMessage To Cls - Whether to deliver to cls.
- drop
Cls stringLog Set - cls LogSet id.
- drop
Cls stringOwneruin - account.
- drop
Cls stringRegion - The region where the cls is delivered.
- drop
Cls stringTopic Id - cls topic.
- drop
Invalid booleanMessage To Cls - Whether to deliver to cls.
- drop_
cls_ strlog_ set - cls LogSet id.
- drop_
cls_ strowneruin - account.
- drop_
cls_ strregion - The region where the cls is delivered.
- drop_
cls_ strtopic_ id - cls topic.
- drop_
invalid_ boolmessage_ to_ cls - Whether to deliver to cls.
- drop
Cls StringLog Set - cls LogSet id.
- drop
Cls StringOwneruin - account.
- drop
Cls StringRegion - The region where the cls is delivered.
- drop
Cls StringTopic Id - cls topic.
- drop
Invalid BooleanMessage To Cls - Whether to deliver to cls.
GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropDlq
- Dlq
Type string - dlq type, CKAFKA|TOPIC.
- Kafka
Params List<GetCkafka Datahub Task Task List Target Resource Es Param Drop Dlq Kafka Param> - ckafka configuration, required when Type is KAFKA.
- Max
Retry doubleAttempts - retry times.
- Retry
Interval double - retry interval.
- Topic
Params List<GetCkafka Datahub Task Task List Target Resource Es Param Drop Dlq Topic Param> - Topic configuration, Required when Type is Topic.
- Type string
- Resource Type.
- Dlq
Type string - dlq type, CKAFKA|TOPIC.
- Kafka
Params []GetCkafka Datahub Task Task List Target Resource Es Param Drop Dlq Kafka Param - ckafka configuration, required when Type is KAFKA.
- Max
Retry float64Attempts - retry times.
- Retry
Interval float64 - retry interval.
- Topic
Params []GetCkafka Datahub Task Task List Target Resource Es Param Drop Dlq Topic Param - Topic configuration, Required when Type is Topic.
- Type string
- Resource Type.
- dlq
Type String - dlq type, CKAFKA|TOPIC.
- kafka
Params List<GetCkafka Datahub Task Task List Target Resource Es Param Drop Dlq Kafka Param> - ckafka configuration, required when Type is KAFKA.
- max
Retry DoubleAttempts - retry times.
- retry
Interval Double - retry interval.
- topic
Params List<GetCkafka Datahub Task Task List Target Resource Es Param Drop Dlq Topic Param> - Topic configuration, Required when Type is Topic.
- type String
- Resource Type.
- dlq
Type string - dlq type, CKAFKA|TOPIC.
- kafka
Params GetCkafka Datahub Task Task List Target Resource Es Param Drop Dlq Kafka Param[] - ckafka configuration, required when Type is KAFKA.
- max
Retry numberAttempts - retry times.
- retry
Interval number - retry interval.
- topic
Params GetCkafka Datahub Task Task List Target Resource Es Param Drop Dlq Topic Param[] - Topic configuration, Required when Type is Topic.
- type string
- Resource Type.
- dlq_type str - dlq type, CKAFKA|TOPIC.
- kafka_params Sequence[GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropDlqKafkaParam] - ckafka configuration, required when Type is KAFKA.
- max_retry_attempts float - retry times.
- retry_interval float - retry interval.
- topic_params Sequence[GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropDlqTopicParam] - Topic configuration, Required when Type is Topic.
- type str
- Resource Type.
- dlq
Type String - dlq type, CKAFKA|TOPIC.
- kafka
Params List<Property Map> - ckafka configuration, required when Type is KAFKA.
- max
Retry NumberAttempts - retry times.
- retry
Interval Number - retry interval.
- topic
Params List<Property Map> - Topic configuration, Required when Type is Topic.
- type String
- Resource Type.
GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropDlqKafkaParam
- Compression
Type string - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- Connector
Sync stringType - ConnectorSyncType.
- Enable
Toleration bool - enable dead letter queue.
- Keep
Partition bool - KeepPartition.
- Msg
Multiple double - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- Offset
Type string - Offset type, initial position earliest, latest position latest, time point position timestamp.
- Partition
Num double - the partition num of the topic.
- Qps
Limit double - Qps(query per seconds) limit.
- Resource string
- Resource.
- Resource
Name string - instance name.
- Self
Built bool - Whether it is a self-built cluster.
- Start
Time double - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- Table
Mappings List<GetCkafka Datahub Task Task List Target Resource Es Param Drop Dlq Kafka Param Table Mapping> - maps of table to topic, required when multi topic is selected.
- Topic string
- Topic name, use
,
when more than 1 topic. - Topic
Id string - Topic TopicId.
- Use
Auto boolCreate Topic - whether the used topic need to be automatically created (currently only supports SOURCE inflow tasks).
- Use
Table boolMapping - whether to use multi table.
- Zone
Id double - Zone ID.
- Compression
Type string - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- Connector
Sync stringType - ConnectorSyncType.
- Enable
Toleration bool - enable dead letter queue.
- Keep
Partition bool - KeepPartition.
- Msg
Multiple float64 - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- Offset
Type string - Offset type, initial position earliest, latest position latest, time point position timestamp.
- Partition
Num float64 - the partition num of the topic.
- Qps
Limit float64 - Qps(query per seconds) limit.
- Resource string
- Resource.
- Resource
Name string - instance name.
- Self
Built bool - Whether it is a self-built cluster.
- Start
Time float64 - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- Table
Mappings []GetCkafka Datahub Task Task List Target Resource Es Param Drop Dlq Kafka Param Table Mapping - maps of table to topic, required when multi topic is selected.
- Topic string
- Topic name, use
,
when more than 1 topic. - Topic
Id string - Topic TopicId.
- Use
Auto boolCreate Topic - whether the used topic need to be automatically created (currently only supports SOURCE inflow tasks).
- Use
Table boolMapping - whether to use multi table.
- Zone
Id float64 - Zone ID.
- compression
Type String - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- connector
Sync StringType - ConnectorSyncType.
- enable
Toleration Boolean - enable dead letter queue.
- keep
Partition Boolean - KeepPartition.
- msg
Multiple Double - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offset
Type String - Offset type, initial position earliest, latest position latest, time point position timestamp.
- partition
Num Double - the partition num of the topic.
- qps
Limit Double - Qps(query per seconds) limit.
- resource String
- Resource.
- resource
Name String - instance name.
- self
Built Boolean - Whether it is a self-built cluster.
- start
Time Double - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- table
Mappings List<GetCkafka Datahub Task Task List Target Resource Es Param Drop Dlq Kafka Param Table Mapping> - maps of table to topic, required when multi topic is selected.
- topic String
- Topic name, use
,
when more than 1 topic. - topic
Id String - Topic TopicId.
- use
Auto BooleanCreate Topic - whether the used topic need to be automatically created (currently only supports SOURCE inflow tasks).
- use
Table BooleanMapping - whether to use multi table.
- zone
Id Double - Zone ID.
- compression
Type string - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- connector
Sync stringType - ConnectorSyncType.
- enable
Toleration boolean - enable dead letter queue.
- keep
Partition boolean - KeepPartition.
- msg
Multiple number - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offset
Type string - Offset type, initial position earliest, latest position latest, time point position timestamp.
- partition
Num number - the partition num of the topic.
- qps
Limit number - Qps(query per seconds) limit.
- resource string
- Resource.
- resource
Name string - instance name.
- self
Built boolean - Whether it is a self-built cluster.
- start
Time number - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- table
Mappings GetCkafka Datahub Task Task List Target Resource Es Param Drop Dlq Kafka Param Table Mapping[] - maps of table to topic, required when multi topic is selected.
- topic string
- Topic name, use
,
when more than 1 topic. - topic
Id string - Topic TopicId.
- use
Auto booleanCreate Topic - whether the used topic need to be automatically created (currently only supports SOURCE inflow tasks).
- use
Table booleanMapping - whether to use multi table.
- zone
Id number - Zone ID.
- compression_
type str - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- connector_
sync_ strtype - ConnectorSyncType.
- enable_
toleration bool - enable dead letter queue.
- keep_
partition bool - KeepPartition.
- msg_
multiple float - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offset_
type str - Offset type, initial position earliest, latest position latest, time point position timestamp.
- partition_
num float - the partition num of the topic.
- qps_
limit float - Qps(query per seconds) limit.
- resource str
- Resource.
- resource_
name str - instance name.
- self_
built bool - Whether it is a self-built cluster.
- start_
time float - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- table_
mappings Sequence[GetCkafka Datahub Task Task List Target Resource Es Param Drop Dlq Kafka Param Table Mapping] - maps of table to topic, required when multi topic is selected.
- topic str
- Topic name; use `,` to separate multiple topics. - topic_
id str - Topic TopicId.
- use_
auto_ boolcreate_ topic - whether the used topic need to be automatically created (currently only supports SOURCE inflow tasks).
- use_
table_ boolmapping - whether to use multi table.
- zone_
id float - Zone ID.
- compression
Type String - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- connector
Sync StringType - ConnectorSyncType.
- enable
Toleration Boolean - enable dead letter queue.
- keep
Partition Boolean - KeepPartition.
- msg
Multiple Number - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offset
Type String - Offset type, initial position earliest, latest position latest, time point position timestamp.
- partition
Num Number - the partition num of the topic.
- qps
Limit Number - Qps(query per seconds) limit.
- resource String
- Resource.
- resource
Name String - instance name.
- self
Built Boolean - Whether it is a self-built cluster.
- start
Time Number - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- table
Mappings List<Property Map> - maps of table to topic, required when multi topic is selected.
- topic String
- Topic name; use `,` to separate multiple topics. - topic
Id String - Topic TopicId.
- use
Auto BooleanCreate Topic - whether the used topic need to be automatically created (currently only supports SOURCE inflow tasks).
- use
Table BooleanMapping - whether to use multi table.
- zone
Id Number - Zone ID.
GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropDlqKafkaParamTableMapping
- Database string
- SQLServer database name.
- Table string
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- Topic string
- Topic name; use `,` to separate multiple topics. - Topic
Id string - Topic TopicId.
- Database string
- SQLServer database name.
- Table string
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- Topic string
- Topic name; use `,` to separate multiple topics. - Topic
Id string - Topic TopicId.
- database String
- SQLServer database name.
- table String
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- topic String
- Topic name; use `,` to separate multiple topics. - topic
Id String - Topic TopicId.
- database string
- SQLServer database name.
- table string
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- topic string
- Topic name; use `,` to separate multiple topics. - topic
Id string - Topic TopicId.
- database str
- SQLServer database name.
- table str
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- topic str
- Topic name; use `,` to separate multiple topics. - topic_
id str - Topic TopicId.
- database String
- SQLServer database name.
- table String
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- topic String
- Topic name; use `,` to separate multiple topics. - topic
Id String - Topic TopicId.
GetCkafkaDatahubTaskTaskListTargetResourceEsParamDropDlqTopicParam
- Compression
Type string - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- Msg
Multiple double - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- Offset
Type string - Offset type, initial position earliest, latest position latest, time point position timestamp.
- Resource string
- Resource.
- Start
Time double - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- Topic
Id string - Topic TopicId.
- Use
Auto boolCreate Topic - whether the used topic need to be automatically created (currently only supports SOURCE inflow tasks).
- Compression
Type string - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- Msg
Multiple float64 - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- Offset
Type string - Offset type, initial position earliest, latest position latest, time point position timestamp.
- Resource string
- Resource.
- Start
Time float64 - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- Topic
Id string - Topic TopicId.
- Use
Auto boolCreate Topic - whether the used topic need to be automatically created (currently only supports SOURCE inflow tasks).
- compression
Type String - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- msg
Multiple Double - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offset
Type String - Offset type, initial position earliest, latest position latest, time point position timestamp.
- resource String
- Resource.
- start
Time Double - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- topic
Id String - Topic TopicId.
- use
Auto BooleanCreate Topic - whether the used topic need to be automatically created (currently only supports SOURCE inflow tasks).
- compression
Type string - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- msg
Multiple number - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offset
Type string - Offset type, initial position earliest, latest position latest, time point position timestamp.
- resource string
- Resource.
- start
Time number - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- topic
Id string - Topic TopicId.
- use
Auto booleanCreate Topic - whether the used topic need to be automatically created (currently only supports SOURCE inflow tasks).
- compression_
type str - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- msg_
multiple float - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offset_
type str - Offset type, initial position earliest, latest position latest, time point position timestamp.
- resource str
- Resource.
- start_
time float - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- topic_
id str - Topic TopicId.
- use_
auto_ boolcreate_ topic - whether the used topic need to be automatically created (currently only supports SOURCE inflow tasks).
- compression
Type String - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- msg
Multiple Number - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offset
Type String - Offset type, initial position earliest, latest position latest, time point position timestamp.
- resource String
- Resource.
- start
Time Number - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- topic
Id String - Topic TopicId.
- use
Auto BooleanCreate Topic - whether the used topic need to be automatically created (currently only supports SOURCE inflow tasks).
GetCkafkaDatahubTaskTaskListTargetResourceEventBusParam
- function_
name str - SCF function name.
- namespace str
- SCF cloud function namespace, the default is default.
- qualifier str
- SCF cloud function version and alias, the default is DEFAULT.
- resource str
- Resource.
- self_
built bool - Whether it is a self-built cluster.
- type str
- Resource Type.
GetCkafkaDatahubTaskTaskListTargetResourceKafkaParam
- Compression
Type string - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- Connector
Sync stringType - ConnectorSyncType.
- Enable
Toleration bool - enable dead letter queue.
- Keep
Partition bool - KeepPartition.
- Msg
Multiple double - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- Offset
Type string - Offset type, initial position earliest, latest position latest, time point position timestamp.
- Partition
Num double - the partition num of the topic.
- Qps
Limit double - Qps(query per seconds) limit.
- Resource string
- Resource.
- Resource
Name string - instance name.
- Self
Built bool - Whether it is a self-built cluster.
- Start
Time double - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- Table
Mappings List<GetCkafka Datahub Task Task List Target Resource Kafka Param Table Mapping> - maps of table to topic, required when multi topic is selected.
- Topic string
- Topic name; use `,` to separate multiple topics. - Topic
Id string - Topic TopicId.
- Use
Auto boolCreate Topic - whether the used topic need to be automatically created (currently only supports SOURCE inflow tasks).
- Use
Table boolMapping - whether to use multi table.
- Zone
Id double - Zone ID.
- Compression
Type string - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- Connector
Sync stringType - ConnectorSyncType.
- Enable
Toleration bool - enable dead letter queue.
- Keep
Partition bool - KeepPartition.
- Msg
Multiple float64 - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- Offset
Type string - Offset type, initial position earliest, latest position latest, time point position timestamp.
- Partition
Num float64 - the partition num of the topic.
- Qps
Limit float64 - Qps(query per seconds) limit.
- Resource string
- Resource.
- Resource
Name string - instance name.
- Self
Built bool - Whether it is a self-built cluster.
- Start
Time float64 - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- Table
Mappings []GetCkafka Datahub Task Task List Target Resource Kafka Param Table Mapping - maps of table to topic, required when multi topic is selected.
- Topic string
- Topic name; use `,` to separate multiple topics. - Topic
Id string - Topic TopicId.
- Use
Auto boolCreate Topic - whether the used topic need to be automatically created (currently only supports SOURCE inflow tasks).
- Use
Table boolMapping - whether to use multi table.
- Zone
Id float64 - Zone ID.
- compression
Type String - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- connector
Sync StringType - ConnectorSyncType.
- enable
Toleration Boolean - enable dead letter queue.
- keep
Partition Boolean - KeepPartition.
- msg
Multiple Double - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offset
Type String - Offset type, initial position earliest, latest position latest, time point position timestamp.
- partition
Num Double - the partition num of the topic.
- qps
Limit Double - Qps(query per seconds) limit.
- resource String
- Resource.
- resource
Name String - instance name.
- self
Built Boolean - Whether it is a self-built cluster.
- start
Time Double - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- table
Mappings List<GetCkafka Datahub Task Task List Target Resource Kafka Param Table Mapping> - maps of table to topic, required when multi topic is selected.
- topic String
- Topic name; use `,` to separate multiple topics. - topic
Id String - Topic TopicId.
- use
Auto BooleanCreate Topic - whether the used topic need to be automatically created (currently only supports SOURCE inflow tasks).
- use
Table BooleanMapping - whether to use multi table.
- zone
Id Double - Zone ID.
- compression
Type string - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- connector
Sync stringType - ConnectorSyncType.
- enable
Toleration boolean - enable dead letter queue.
- keep
Partition boolean - KeepPartition.
- msg
Multiple number - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offset
Type string - Offset type, initial position earliest, latest position latest, time point position timestamp.
- partition
Num number - the partition num of the topic.
- qps
Limit number - Qps(query per seconds) limit.
- resource string
- Resource.
- resource
Name string - instance name.
- self
Built boolean - Whether it is a self-built cluster.
- start
Time number - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- table
Mappings GetCkafka Datahub Task Task List Target Resource Kafka Param Table Mapping[] - maps of table to topic, required when multi topic is selected.
- topic string
- Topic name; use `,` to separate multiple topics. - topic
Id string - Topic TopicId.
- use
Auto booleanCreate Topic - whether the used topic need to be automatically created (currently only supports SOURCE inflow tasks).
- use
Table booleanMapping - whether to use multi table.
- zone
Id number - Zone ID.
- compression_
type str - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- connector_
sync_ strtype - ConnectorSyncType.
- enable_
toleration bool - enable dead letter queue.
- keep_
partition bool - KeepPartition.
- msg_
multiple float - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offset_
type str - Offset type, initial position earliest, latest position latest, time point position timestamp.
- partition_
num float - the partition num of the topic.
- qps_
limit float - Qps(query per seconds) limit.
- resource str
- Resource.
- resource_
name str - instance name.
- self_
built bool - Whether it is a self-built cluster.
- start_
time float - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- table_
mappings Sequence[GetCkafka Datahub Task Task List Target Resource Kafka Param Table Mapping] - maps of table to topic, required when multi topic is selected.
- topic str
- Topic name; use `,` to separate multiple topics. - topic_
id str - Topic TopicId.
- use_
auto_ boolcreate_ topic - whether the used topic need to be automatically created (currently only supports SOURCE inflow tasks).
- use_
table_ boolmapping - whether to use multi table.
- zone_
id float - Zone ID.
- compression
Type String - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- connector
Sync StringType - ConnectorSyncType.
- enable
Toleration Boolean - enable dead letter queue.
- keep
Partition Boolean - KeepPartition.
- msg
Multiple Number - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offset
Type String - Offset type, initial position earliest, latest position latest, time point position timestamp.
- partition
Num Number - the partition num of the topic.
- qps
Limit Number - Qps(query per seconds) limit.
- resource String
- Resource.
- resource
Name String - instance name.
- self
Built Boolean - Whether it is a self-built cluster.
- start
Time Number - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- table
Mappings List<Property Map> - maps of table to topic, required when multi topic is selected.
- topic String
- Topic name; use `,` to separate multiple topics. - topic
Id String - Topic TopicId.
- use
Auto BooleanCreate Topic - whether the used topic need to be automatically created (currently only supports SOURCE inflow tasks).
- use
Table BooleanMapping - whether to use multi table.
- zone
Id Number - Zone ID.
GetCkafkaDatahubTaskTaskListTargetResourceKafkaParamTableMapping
- Database string
- SQLServer database name.
- Table string
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- Topic string
- Topic name; use `,` to separate multiple topics. - Topic
Id string - Topic TopicId.
- Database string
- SQLServer database name.
- Table string
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- Topic string
- Topic name; use `,` to separate multiple topics. - Topic
Id string - Topic TopicId.
- database String
- SQLServer database name.
- table String
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- topic String
- Topic name; use `,` to separate multiple topics. - topic
Id String - Topic TopicId.
- database string
- SQLServer database name.
- table string
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- topic string
- Topic name; use `,` to separate multiple topics. - topic
Id string - Topic TopicId.
- database str
- SQLServer database name.
- table str
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- topic str
- Topic name; use `,` to separate multiple topics. - topic_
id str - Topic TopicId.
- database String
- SQLServer database name.
- table String
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- topic String
- Topic name; use `,` to separate multiple topics. - topic
Id String - Topic TopicId.
GetCkafkaDatahubTaskTaskListTargetResourceMariaDbParam
- Database string
- SQLServer database name.
- Include
Content stringChanges - If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- Include
Query bool - If the value is true, and the value of the `binlog_rows_query_log_events` configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain the original SQL statement.
- Is
Table boolPrefix - When the Table input is a prefix, the value of this item is true, otherwise it is false.
- Key
Columns string - Format library1.table1:field 1,field2;library2.table2:field2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- Output
Format string - output format, DEFAULT, CANAL_1, CANAL_2.
- Record
With boolSchema - If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- Resource string
- Resource.
- Snapshot
Mode string - schema_only|initial default initial.
- Table string
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- Database string
- SQLServer database name.
- Include
Content stringChanges - If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- Include
Query bool - If the value is true, and the value of the `binlog_rows_query_log_events` configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain the original SQL statement.
- Is
Table boolPrefix - When the Table input is a prefix, the value of this item is true, otherwise it is false.
- Key
Columns string - Format library1.table1:field 1,field2;library2.table2:field2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- Output
Format string - output format, DEFAULT, CANAL_1, CANAL_2.
- Record
With boolSchema - If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- Resource string
- Resource.
- Snapshot
Mode string - schema_only|initial default initial.
- Table string
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- database String
- SQLServer database name.
- include
Content StringChanges - If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- include
Query Boolean - If the value is true, and the value of the `binlog_rows_query_log_events` configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain the original SQL statement.
- is
Table BooleanPrefix - When the Table input is a prefix, the value of this item is true, otherwise it is false.
- key
Columns String - Format library1.table1:field 1,field2;library2.table2:field2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- output
Format String - output format, DEFAULT, CANAL_1, CANAL_2.
- record
With BooleanSchema - If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- resource String
- Resource.
- snapshot
Mode String - schema_only|initial default initial.
- table String
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- database string
- SQLServer database name.
- include
Content stringChanges - If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- include
Query boolean - If the value is true, and the value of the `binlog_rows_query_log_events` configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain the original SQL statement.
- is
Table booleanPrefix - When the Table input is a prefix, the value of this item is true, otherwise it is false.
- key
Columns string - Format library1.table1:field 1,field2;library2.table2:field2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- output
Format string - output format, DEFAULT, CANAL_1, CANAL_2.
- record
With booleanSchema - If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- resource string
- Resource.
- snapshot
Mode string - schema_only|initial default initial.
- table string
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- database str
- SQLServer database name.
- include_
content_ strchanges - If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- include_
query bool - If the value is true, and the value of the `binlog_rows_query_log_events` configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain the original SQL statement.
- is_
table_ boolprefix - When the Table input is a prefix, the value of this item is true, otherwise it is false.
- key_
columns str - Format library1.table1:field 1,field2;library2.table2:field2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- output_
format str - output format, DEFAULT, CANAL_1, CANAL_2.
- record_
with_ boolschema - If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- resource str
- Resource.
- snapshot_
mode str - schema_only|initial default initial.
- table str
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- database String
- SQLServer database name.
- include
Content StringChanges - If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- include
Query Boolean - If the value is true, and the value of the `binlog_rows_query_log_events` configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain the original SQL statement.
- is
Table BooleanPrefix - When the Table input is a prefix, the value of this item is true, otherwise it is false.
- key
Columns String - Format library1.table1:field 1,field2;library2.table2:field2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- output
Format String - output format, DEFAULT, CANAL_1, CANAL_2.
- record
With BooleanSchema - If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- resource String
- Resource.
- snapshot
Mode String - schema_only|initial default initial.
- table String
- SQLServer table, is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
GetCkafkaDatahubTaskTaskListTargetResourceMongoDbParam
- Collection string
- MongoDB collection.
- Copy
Existing bool - Whether to copy the stock data, the default parameter is true.
- Database string
- SQLServer database name.
- Ip string
- MongoDB connection IP.
- Listening
Event string - Listening event type, if it is empty, it means select all. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename, used between multiple types, separated by commas.
- Password string
- MongoDB database password.
- Pipeline string
- aggregation pipeline.
- Port double
- MongoDB connection port.
- Read
Preference string - Master-slave priority, default master node.
- Resource string
- Resource.
- Self
Built bool - Whether it is a self-built cluster.
- User
Name string - MongoDB database user name.
- Collection string
- MongoDB collection.
- Copy
Existing bool - Whether to copy the stock data, the default parameter is true.
- Database string
- SQLServer database name.
- Ip string
- MongoDB connection IP.
- Listening
Event string - Listening event type, if it is empty, it means select all. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename, used between multiple types, separated by commas.
- Password string
- MongoDB database password.
- Pipeline string
- aggregation pipeline.
- Port float64
- MongoDB connection port.
- Read
Preference string - Master-slave priority, default master node.
- Resource string
- Resource.
- Self
Built bool - Whether it is a self-built cluster.
- User
Name string - MongoDB database user name.
- collection String
- MongoDB collection.
- copy
Existing Boolean - Whether to copy the stock data, the default parameter is true.
- database String
- SQLServer database name.
- ip String
- MongoDB connection IP.
- listening
Event String - Listening event type, if it is empty, it means select all. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename, used between multiple types, separated by commas.
- password String
- MongoDB database password.
- pipeline String
- aggregation pipeline.
- port Double
- MongoDB connection port.
- read
Preference String - Master-slave priority, default master node.
- resource String
- Resource.
- self
Built Boolean - Whether it is a self-built cluster.
- user
Name String - MongoDB database user name.
- collection string
- MongoDB collection.
- copy
Existing boolean - Whether to copy the stock data, the default parameter is true.
- database string
- MongoDB database name.
- ip string
- Mongo DB connection ip.
- listening
Event string - Listening event type, if it is empty, it means select all. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename, used between multiple types, separated by commas.
- password string
- MongoDB database password.
- pipeline string
- aggregation pipeline.
- port number
- MongoDB connection port.
- read
Preference string - Master-slave priority, default master node.
- resource string
- Resource.
- self
Built boolean - Whether it is a self-built cluster.
- user
Name string - MongoDB database user name.
- collection str
- MongoDB collection.
- copy_
existing bool - Whether to copy the stock data, the default parameter is true.
- database str
- MongoDB database name.
- ip str
- Mongo DB connection ip.
- listening_
event str - Listening event type, if it is empty, it means select all. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename, used between multiple types, separated by commas.
- password str
- MongoDB database password.
- pipeline str
- aggregation pipeline.
- port float
- MongoDB connection port.
- read_
preference str - Master-slave priority, default master node.
- resource str
- Resource.
- self_
built bool - Whether it is a self-built cluster.
- user_
name str - MongoDB database user name.
- collection String
- MongoDB collection.
- copy
Existing Boolean - Whether to copy the stock data, the default parameter is true.
- database String
- MongoDB database name.
- ip String
- Mongo DB connection ip.
- listening
Event String - Listening event type, if it is empty, it means select all. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename, used between multiple types, separated by commas.
- password String
- MongoDB database password.
- pipeline String
- aggregation pipeline.
- port Number
- MongoDB connection port.
- read
Preference String - Master-slave priority, default master node.
- resource String
- Resource.
- self
Built Boolean - Whether it is a self-built cluster.
- user
Name String - MongoDB database user name.
GetCkafkaDatahubTaskTaskListTargetResourceMySqlParam
- Data
Source stringIncrement Column - the name of the column to be monitored.
- Data
Source stringIncrement Mode - TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
- Data
Source stringMonitor Mode - TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
- Data
Source stringMonitor Resource - When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
- Data
Source stringStart From - HEAD means copy stock + incremental data, TAIL means copy only incremental data.
- Data
Target stringInsert Mode - INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- Data
Target stringPrimary Key Field - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- Data
Target List<GetRecord Mappings Ckafka Datahub Task Task List Target Resource My Sql Param Data Target Record Mapping> - Mapping relationship between tables and messages.
- Database string
- MySQL database name.
- Ddl
Topic string - The Topic that stores the Ddl information of My SQL, if it is empty, it will not be stored by default.
- Drop
Cls List<GetCkafka Datahub Task Task List Target Resource My Sql Param Drop Cl> - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- Drop
Invalid boolMessage - Whether to discard messages that fail to parse, the default is true.
- Include
Content stringChanges - If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- Include
Query bool - If the value is true, and the value of the binlog rows query log events configuration item in My SQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain Original SQL statement.
- Is
Table boolPrefix - When the Table input is a prefix, the value of this item is true, otherwise it is false.
- Is
Table boolRegular - Whether the input table is a regular expression.
- Key
Columns string - Format library1.table1:field 1,field2;library2.table2:field2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- Output
Format string - output format, DEFAULT, CANAL_1, CANAL_2.
- Record
With boolSchema - If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- Resource string
- Resource.
- Signal
Database string - database name of signal table.
- Snapshot
Mode string - schema_only|initial default initial.
- Table string
- MySQL table to monitor; any non-system table in the monitored databases. Use commas to monitor multiple tables; each table must be given in the format database name.table name.
- Topic
Regex string - Regular expression for routing events to specific topics, defaults to (.*).
- Topic
Replacement string - Topic replacement template used together with TopicRegex; supports $1 and $2 capture-group references.
- Data
Source stringIncrement Column - the name of the column to be monitored.
- Data
Source stringIncrement Mode - TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
- Data
Source stringMonitor Mode - TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
- Data
Source stringMonitor Resource - When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
- Data
Source stringStart From - HEAD means copy stock + incremental data, TAIL means copy only incremental data.
- Data
Target stringInsert Mode - INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- Data
Target stringPrimary Key Field - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- Data
Target []GetRecord Mappings Ckafka Datahub Task Task List Target Resource My Sql Param Data Target Record Mapping - Mapping relationship between tables and messages.
- Database string
- MySQL database name.
- Ddl
Topic string - The Topic that stores the Ddl information of My SQL, if it is empty, it will not be stored by default.
- Drop
Cls []GetCkafka Datahub Task Task List Target Resource My Sql Param Drop Cl - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- Drop
Invalid boolMessage - Whether to discard messages that fail to parse, the default is true.
- Include
Content stringChanges - If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- Include
Query bool - If the value is true, and the value of the binlog rows query log events configuration item in My SQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain Original SQL statement.
- Is
Table boolPrefix - When the Table input is a prefix, the value of this item is true, otherwise it is false.
- Is
Table boolRegular - Whether the input table is a regular expression.
- Key
Columns string - Format library1.table1:field 1,field2;library2.table2:field2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- Output
Format string - output format, DEFAULT, CANAL_1, CANAL_2.
- Record
With boolSchema - If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- Resource string
- Resource.
- Signal
Database string - database name of signal table.
- Snapshot
Mode string - schema_only|initial default initial.
- Table string
- MySQL table to monitor; any non-system table in the monitored databases. Use commas to monitor multiple tables; each table must be given in the format database name.table name.
- Topic
Regex string - Regular expression for routing events to specific topics, defaults to (.*).
- Topic
Replacement string - Topic replacement template used together with TopicRegex; supports $1 and $2 capture-group references.
- data
Source StringIncrement Column - the name of the column to be monitored.
- data
Source StringIncrement Mode - TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
- data
Source StringMonitor Mode - TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
- data
Source StringMonitor Resource - When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
- data
Source StringStart From - HEAD means copy stock + incremental data, TAIL means copy only incremental data.
- data
Target StringInsert Mode - INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- data
Target StringPrimary Key Field - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- data
Target List<GetRecord Mappings Ckafka Datahub Task Task List Target Resource My Sql Param Data Target Record Mapping> - Mapping relationship between tables and messages.
- database String
- MySQL database name.
- ddl
Topic String - The Topic that stores the Ddl information of My SQL, if it is empty, it will not be stored by default.
- drop
Cls List<GetCkafka Datahub Task Task List Target Resource My Sql Param Drop Cl> - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- drop
Invalid BooleanMessage - Whether to discard messages that fail to parse, the default is true.
- include
Content StringChanges - If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- include
Query Boolean - If the value is true, and the value of the binlog rows query log events configuration item in My SQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain Original SQL statement.
- is
Table BooleanPrefix - When the Table input is a prefix, the value of this item is true, otherwise it is false.
- is
Table BooleanRegular - Whether the input table is a regular expression.
- key
Columns String - Format library1.table1:field 1,field2;library2.table2:field2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- output
Format String - output format, DEFAULT, CANAL_1, CANAL_2.
- record
With BooleanSchema - If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- resource String
- Resource.
- signal
Database String - database name of signal table.
- snapshot
Mode String - schema_only|initial default initial.
- table String
- MySQL table to monitor; any non-system table in the monitored databases. Use commas to monitor multiple tables; each table must be given in the format database name.table name.
- topic
Regex String - Regular expression for routing events to specific topics, defaults to (.*).
- topic
Replacement String - Topic replacement template used together with TopicRegex; supports $1 and $2 capture-group references.
- data
Source stringIncrement Column - the name of the column to be monitored.
- data
Source stringIncrement Mode - TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
- data
Source stringMonitor Mode - TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
- data
Source stringMonitor Resource - When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
- data
Source stringStart From - HEAD means copy stock + incremental data, TAIL means copy only incremental data.
- data
Target stringInsert Mode - INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- data
Target stringPrimary Key Field - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- data
Target GetRecord Mappings Ckafka Datahub Task Task List Target Resource My Sql Param Data Target Record Mapping[] - Mapping relationship between tables and messages.
- database string
- MySQL database name.
- ddl
Topic string - The Topic that stores the Ddl information of My SQL, if it is empty, it will not be stored by default.
- drop
Cls GetCkafka Datahub Task Task List Target Resource My Sql Param Drop Cl[] - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- drop
Invalid booleanMessage - Whether to discard messages that fail to parse, the default is true.
- include
Content stringChanges - If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- include
Query boolean - If the value is true, and the value of the binlog rows query log events configuration item in My SQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain Original SQL statement.
- is
Table booleanPrefix - When the Table input is a prefix, the value of this item is true, otherwise it is false.
- is
Table booleanRegular - Whether the input table is a regular expression.
- key
Columns string - Format library1.table1:field 1,field2;library2.table2:field2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- output
Format string - output format, DEFAULT, CANAL_1, CANAL_2.
- record
With booleanSchema - If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- resource string
- Resource.
- signal
Database string - database name of signal table.
- snapshot
Mode string - schema_only|initial default initial.
- table string
- MySQL table to monitor; any non-system table in the monitored databases. Use commas to monitor multiple tables; each table must be given in the format database name.table name.
- topic
Regex string - Regular expression for routing events to specific topics, defaults to (.*).
- topic
Replacement string - Topic replacement template used together with TopicRegex; supports $1 and $2 capture-group references.
- data_
source_ strincrement_ column - the name of the column to be monitored.
- data_
source_ strincrement_ mode - TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
- data_
source_ strmonitor_ mode - TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
- data_
source_ strmonitor_ resource - When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
- data_
source_ strstart_ from - HEAD means copy stock + incremental data, TAIL means copy only incremental data.
- data_
target_ strinsert_ mode - INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- data_
target_ strprimary_ key_ field - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- data_
target_ Sequence[Getrecord_ mappings Ckafka Datahub Task Task List Target Resource My Sql Param Data Target Record Mapping] - Mapping relationship between tables and messages.
- database str
- MySQL database name.
- ddl_
topic str - The Topic that stores the Ddl information of My SQL, if it is empty, it will not be stored by default.
- drop_
cls Sequence[GetCkafka Datahub Task Task List Target Resource My Sql Param Drop Cl] - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- drop_
invalid_ boolmessage - Whether to discard messages that fail to parse, the default is true.
- include_
content_ strchanges - If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- include_
query bool - If the value is true, and the value of the binlog rows query log events configuration item in My SQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain Original SQL statement.
- is_
table_ boolprefix - When the Table input is a prefix, the value of this item is true, otherwise it is false.
- is_
table_ boolregular - Whether the input table is a regular expression.
- key_
columns str - Format library1.table1:field 1,field2;library2.table2:field2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- output_
format str - output format, DEFAULT, CANAL_1, CANAL_2.
- record_
with_ boolschema - If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- resource str
- Resource.
- signal_
database str - database name of signal table.
- snapshot_
mode str - schema_only|initial default initial.
- table str
- MySQL table to monitor; any non-system table in the monitored databases. Use commas to monitor multiple tables; each table must be given in the format database name.table name.
- topic_
regex str - Regular expression for routing events to specific topics, defaults to (.*).
- topic_
replacement str - Topic replacement template used together with TopicRegex; supports $1 and $2 capture-group references.
- data
Source StringIncrement Column - the name of the column to be monitored.
- data
Source StringIncrement Mode - TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
- data
Source StringMonitor Mode - TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
- data
Source StringMonitor Resource - When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
- data
Source StringStart From - HEAD means copy stock + incremental data, TAIL means copy only incremental data.
- data
Target StringInsert Mode - INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- data
Target StringPrimary Key Field - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- data
Target List<Property Map>Record Mappings - Mapping relationship between tables and messages.
- database String
- MySQL database name.
- ddl
Topic String - The Topic that stores the Ddl information of My SQL, if it is empty, it will not be stored by default.
- drop
Cls List<Property Map> - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- drop
Invalid BooleanMessage - Whether to discard messages that fail to parse, the default is true.
- include
Content StringChanges - If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- include
Query Boolean - If the value is true, and the value of the binlog rows query log events configuration item in My SQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain Original SQL statement.
- is
Table BooleanPrefix - When the Table input is a prefix, the value of this item is true, otherwise it is false.
- is
Table BooleanRegular - Whether the input table is a regular expression.
- key
Columns String - Format library1.table1:field 1,field2;library2.table2:field2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- output
Format String - output format, DEFAULT, CANAL_1, CANAL_2.
- record
With BooleanSchema - If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- resource String
- Resource.
- signal
Database String - database name of signal table.
- snapshot
Mode String - schema_only|initial default initial.
- table String
- MySQL table to monitor; any non-system table in the monitored databases. Use commas to monitor multiple tables; each table must be given in the format database name.table name.
- topic
Regex String - Regular expression for routing events to specific topics, defaults to (.*).
- topic
Replacement String - Topic replacement template used together with TopicRegex; supports $1 and $2 capture-group references.
GetCkafkaDatahubTaskTaskListTargetResourceMySqlParamDataTargetRecordMapping
- Allow
Null bool - Whether the message is allowed to be empty.
- Auto
Increment bool - Whether it is an auto-increment column.
- Column
Name string - Column Name.
- Column
Size string - current ColumnSize.
- Decimal
Digits string - current Column DecimalDigits.
- Default
Value string - Database table default parameters.
- Extra
Info string - Database table extra fields.
- Json
Key string - The key name of the message.
- Type string
- Resource Type.
- Allow
Null bool - Whether the message is allowed to be empty.
- Auto
Increment bool - Whether it is an auto-increment column.
- Column
Name string - Column Name.
- Column
Size string - current ColumnSize.
- Decimal
Digits string - current Column DecimalDigits.
- Default
Value string - Database table default parameters.
- Extra
Info string - Database table extra fields.
- Json
Key string - The key name of the message.
- Type string
- Resource Type.
- allow
Null Boolean - Whether the message is allowed to be empty.
- auto
Increment Boolean - Whether it is an auto-increment column.
- column
Name String - Column Name.
- column
Size String - current ColumnSize.
- decimal
Digits String - current Column DecimalDigits.
- default
Value String - Database table default parameters.
- extra
Info String - Database table extra fields.
- json
Key String - The key name of the message.
- type String
- Resource Type.
- allow
Null boolean - Whether the message is allowed to be empty.
- auto
Increment boolean - Whether it is an auto-increment column.
- column
Name string - Column Name.
- column
Size string - current ColumnSize.
- decimal
Digits string - current Column DecimalDigits.
- default
Value string - Database table default parameters.
- extra
Info string - Database table extra fields.
- json
Key string - The key name of the message.
- type string
- Resource Type.
- allow_
null bool - Whether the message is allowed to be empty.
- auto_
increment bool - Whether it is an auto-increment column.
- column_
name str - Column Name.
- column_
size str - current ColumnSize.
- decimal_
digits str - current Column DecimalDigits.
- default_
value str - Database table default parameters.
- extra_
info str - Database table extra fields.
- json_
key str - The key name of the message.
- type str
- Resource Type.
- allow
Null Boolean - Whether the message is allowed to be empty.
- auto
Increment Boolean - Whether it is an auto-increment column.
- column
Name String - Column Name.
- column
Size String - current ColumnSize.
- decimal
Digits String - current Column DecimalDigits.
- default
Value String - Database table default parameters.
- extra
Info String - Database table extra fields.
- json
Key String - The key name of the message.
- type String
- Resource Type.
GetCkafkaDatahubTaskTaskListTargetResourceMySqlParamDropCl
- Drop
Cls stringLog Set - cls LogSet id.
- Drop
Cls stringOwneruin - account.
- Drop
Cls stringRegion - The region where the cls is delivered.
- Drop
Cls stringTopic Id - cls topic.
- Drop
Invalid boolMessage To Cls - Whether to deliver to cls.
- Drop
Cls stringLog Set - cls LogSet id.
- Drop
Cls stringOwneruin - account.
- Drop
Cls stringRegion - The region where the cls is delivered.
- Drop
Cls stringTopic Id - cls topic.
- Drop
Invalid boolMessage To Cls - Whether to deliver to cls.
- drop
Cls StringLog Set - cls LogSet id.
- drop
Cls StringOwneruin - account.
- drop
Cls StringRegion - The region where the cls is delivered.
- drop
Cls StringTopic Id - cls topic.
- drop
Invalid BooleanMessage To Cls - Whether to deliver to cls.
- drop
Cls stringLog Set - cls LogSet id.
- drop
Cls stringOwneruin - account.
- drop
Cls stringRegion - The region where the cls is delivered.
- drop
Cls stringTopic Id - cls topic.
- drop
Invalid booleanMessage To Cls - Whether to deliver to cls.
- drop_
cls_ strlog_ set - cls LogSet id.
- drop_
cls_ strowneruin - account.
- drop_
cls_ strregion - The region where the cls is delivered.
- drop_
cls_ strtopic_ id - cls topic.
- drop_
invalid_ boolmessage_ to_ cls - Whether to deliver to cls.
- drop
Cls StringLog Set - cls LogSet id.
- drop
Cls StringOwneruin - account.
- drop
Cls StringRegion - The region where the cls is delivered.
- drop
Cls StringTopic Id - cls topic.
- drop
Invalid BooleanMessage To Cls - Whether to deliver to cls.
GetCkafkaDatahubTaskTaskListTargetResourcePostgreSqlParam
- Data
Format string - Upstream data format (JSON|Debezium), required when the database synchronization mode matches the default field.
- Data
Target stringInsert Mode - INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- Data
Target stringPrimary Key Field - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- Data
Target List<GetRecord Mappings Ckafka Datahub Task Task List Target Resource Postgre Sql Param Data Target Record Mapping> - Mapping relationship between tables and messages.
- Database string
- PostgreSQL database name.
- Drop
Invalid boolMessage - Whether to discard messages that fail to parse, the default is true.
- Is
Table boolRegular - Whether the input table is a regular expression.
- Key
Columns string - Format library1.table1:field 1,field2;library2.table2:field2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- Plugin
Name string - (decoderbufs/pgoutput), default decoderbufs.
- Record
With boolSchema - If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- Resource string
- Resource.
- Snapshot
Mode string - schema_only|initial default initial.
- Table string
- PostgreSQL table to monitor; any non-system table in the monitored databases. Use commas to monitor multiple tables; each table must be given in the format database name.table name.
- Data
Format string - Upstream data format (JSON|Debezium), required when the database synchronization mode matches the default field.
- Data
Target stringInsert Mode - INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- Data
Target stringPrimary Key Field - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- Data
Target []GetRecord Mappings Ckafka Datahub Task Task List Target Resource Postgre Sql Param Data Target Record Mapping - Mapping relationship between tables and messages.
- Database string
- PostgreSQL database name.
- Drop
Invalid boolMessage - Whether to discard messages that fail to parse, the default is true.
- Is
Table boolRegular - Whether the input table is a regular expression.
- Key
Columns string - Format library1.table1:field 1,field2;library2.table2:field2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- Plugin
Name string - (decoderbufs/pgoutput), default decoderbufs.
- Record
With boolSchema - If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- Resource string
- Resource.
- Snapshot
Mode string - schema_only|initial default initial.
- Table string
- PostgreSQL table to monitor; any non-system table in the monitored databases. Use commas to monitor multiple tables; each table must be given in the format database name.table name.
- data
Format String - Upstream data format (JSON|Debezium), required when the database synchronization mode matches the default field.
- data
Target StringInsert Mode - INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- data
Target StringPrimary Key Field - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- data
Target List<GetRecord Mappings Ckafka Datahub Task Task List Target Resource Postgre Sql Param Data Target Record Mapping> - Mapping relationship between tables and messages.
- database String
- PostgreSQL database name.
- drop
Invalid BooleanMessage - Whether to discard messages that fail to parse, the default is true.
- is
Table BooleanRegular - Whether the input table is a regular expression.
- key
Columns String - Format library1.table1:field 1,field2;library2.table2:field2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- plugin
Name String - (decoderbufs/pgoutput), default decoderbufs.
- record
With BooleanSchema - If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- resource String
- Resource.
- snapshot
Mode String - schema_only|initial default initial.
- table String
- PostgreSQL table to monitor; any non-system table in the monitored databases. Use commas to monitor multiple tables; each table must be given in the format database name.table name.
- data
Format string - Upstream data format (JSON|Debezium), required when the database synchronization mode matches the default field.
- data
Target stringInsert Mode - INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- data
Target stringPrimary Key Field - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- data
Target GetRecord Mappings Ckafka Datahub Task Task List Target Resource Postgre Sql Param Data Target Record Mapping[] - Mapping relationship between tables and messages.
- database string
- PostgreSQL database name.
- drop
Invalid booleanMessage - Whether to discard messages that fail to parse, the default is true.
- is
Table booleanRegular - Whether the input table is a regular expression.
- key
Columns string - Format library1.table1:field 1,field2;library2.table2:field2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- plugin
Name string - (decoderbufs/pgoutput), default decoderbufs.
- record
With booleanSchema - If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- resource string
- Resource.
- snapshot
Mode string - schema_only|initial default initial.
- table string
- SQLServer table; any non-system table in the monitored databases. You can use , (comma) to monitor multiple data tables, but each data table needs to be filled in the format of data database name.data table name.
- data_
format str - Upstream data format (JSON|Debezium), required when the database synchronization mode matches the default field.
- data_
target_ strinsert_ mode - INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- data_
target_ strprimary_ key_ field - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- data_
target_ Sequence[Getrecord_ mappings Ckafka Datahub Task Task List Target Resource Postgre Sql Param Data Target Record Mapping] - Mapping relationship between tables and messages.
- database str
- SQLServer database name.
- drop_
invalid_ boolmessage - Whether to discard messages that fail to parse, the default is true.
- is_
table_ boolregular - Whether the input table is a regular expression.
- key_
columns str - Format: library1.table1:field1,field2;library2.table2:field2 — tables are separated by ; (semicolon) and fields by , (comma). A table that is not specified defaults to the table's primary key.
- plugin_
name str - (decoderbufs/pgoutput), default decoderbufs.
- record_
with_ boolschema - If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- resource str
- Resource.
- snapshot_
mode str - schema_only|initial default initial.
- table str
- SQLServer table; any non-system table in the monitored databases. You can use , (comma) to monitor multiple data tables, but each data table needs to be filled in the format of data database name.data table name.
- data
Format String - Upstream data format (JSON|Debezium), required when the database synchronization mode matches the default field.
- data
Target StringInsert Mode - INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- data
Target StringPrimary Key Field - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- data
Target List<Property Map>Record Mappings - Mapping relationship between tables and messages.
- database String
- SQLServer database name.
- drop
Invalid BooleanMessage - Whether to discard messages that fail to parse, the default is true.
- is
Table BooleanRegular - Whether the input table is a regular expression.
- key
Columns String - Format: library1.table1:field1,field2;library2.table2:field2 — tables are separated by ; (semicolon) and fields by , (comma). A table that is not specified defaults to the table's primary key.
- plugin
Name String - (decoderbufs/pgoutput), default decoderbufs.
- record
With BooleanSchema - If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- resource String
- Resource.
- snapshot
Mode String - schema_only|initial default initial.
- table String
- SQLServer table; any non-system table in the monitored databases. You can use , (comma) to monitor multiple data tables, but each data table needs to be filled in the format of data database name.data table name.
GetCkafkaDatahubTaskTaskListTargetResourcePostgreSqlParamDataTargetRecordMapping
- Allow
Null bool - Whether the message is allowed to be empty.
- Auto
Increment bool - Whether it is an auto-increment column.
- Column
Name string - Column Name.
- Column
Size string - current ColumnSize.
- Decimal
Digits string - current Column DecimalDigits.
- Default
Value string - Database table default parameters.
- Extra
Info string - Database table extra fields.
- Json
Key string - The key name of the message.
- Type string
- Resource Type.
- Allow
Null bool - Whether the message is allowed to be empty.
- Auto
Increment bool - Whether it is an auto-increment column.
- Column
Name string - Column Name.
- Column
Size string - current ColumnSize.
- Decimal
Digits string - current Column DecimalDigits.
- Default
Value string - Database table default parameters.
- Extra
Info string - Database table extra fields.
- Json
Key string - The key name of the message.
- Type string
- Resource Type.
- allow
Null Boolean - Whether the message is allowed to be empty.
- auto
Increment Boolean - Whether it is an auto-increment column.
- column
Name String - Column Name.
- column
Size String - current ColumnSize.
- decimal
Digits String - current Column DecimalDigits.
- default
Value String - Database table default parameters.
- extra
Info String - Database table extra fields.
- json
Key String - The key name of the message.
- type String
- Resource Type.
- allow
Null boolean - Whether the message is allowed to be empty.
- auto
Increment boolean - Whether it is an auto-increment column.
- column
Name string - Column Name.
- column
Size string - current ColumnSize.
- decimal
Digits string - current Column DecimalDigits.
- default
Value string - Database table default parameters.
- extra
Info string - Database table extra fields.
- json
Key string - The key name of the message.
- type string
- Resource Type.
- allow_
null bool - Whether the message is allowed to be empty.
- auto_
increment bool - Whether it is an auto-increment column.
- column_
name str - Column Name.
- column_
size str - current ColumnSize.
- decimal_
digits str - current Column DecimalDigits.
- default_
value str - Database table default parameters.
- extra_
info str - Database table extra fields.
- json_
key str - The key name of the message.
- type str
- Resource Type.
- allow
Null Boolean - Whether the message is allowed to be empty.
- auto
Increment Boolean - Whether it is an auto-increment column.
- column
Name String - Column Name.
- column
Size String - current ColumnSize.
- decimal
Digits String - current Column DecimalDigits.
- default
Value String - Database table default parameters.
- extra
Info String - Database table extra fields.
- json
Key String - The key name of the message.
- type String
- Resource Type.
GetCkafkaDatahubTaskTaskListTargetResourceScfParam
- Batch
Size double - The maximum number of messages sent in each batch, the default is 1000.
- Function
Name string - SCF function name.
- Max
Retries double - The number of retries after the SCF call fails, the default is 5.
- Namespace string
- SCF cloud function namespace, the default is default.
- Qualifier string
- SCF cloud function version and alias, the default is DEFAULT.
- Batch
Size float64 - The maximum number of messages sent in each batch, the default is 1000.
- Function
Name string - SCF function name.
- Max
Retries float64 - The number of retries after the SCF call fails, the default is 5.
- Namespace string
- SCF cloud function namespace, the default is default.
- Qualifier string
- SCF cloud function version and alias, the default is DEFAULT.
- batch
Size Double - The maximum number of messages sent in each batch, the default is 1000.
- function
Name String - SCF function name.
- max
Retries Double - The number of retries after the SCF call fails, the default is 5.
- namespace String
- SCF cloud function namespace, the default is default.
- qualifier String
- SCF cloud function version and alias, the default is DEFAULT.
- batch
Size number - The maximum number of messages sent in each batch, the default is 1000.
- function
Name string - SCF function name.
- max
Retries number - The number of retries after the SCF call fails, the default is 5.
- namespace string
- SCF cloud function namespace, the default is default.
- qualifier string
- SCF cloud function version and alias, the default is DEFAULT.
- batch_
size float - The maximum number of messages sent in each batch, the default is 1000.
- function_
name str - SCF function name.
- max_
retries float - The number of retries after the SCF call fails, the default is 5.
- namespace str
- SCF cloud function namespace, the default is default.
- qualifier str
- SCF cloud function version and alias, the default is DEFAULT.
- batch
Size Number - The maximum number of messages sent in each batch, the default is 1000.
- function
Name String - SCF function name.
- max
Retries Number - The number of retries after the SCF call fails, the default is 5.
- namespace String
- SCF cloud function namespace, the default is default.
- qualifier String
- SCF cloud function version and alias, the default is DEFAULT.
GetCkafkaDatahubTaskTaskListTargetResourceSqlServerParam
- Database string
- SQLServer database name.
- Resource string
- Resource.
- Snapshot
Mode string - schema_only|initial default initial.
- Table string
- SQLServer table; any non-system table in the monitored databases. You can use , (comma) to monitor multiple data tables, but each data table needs to be filled in the format of data database name.data table name.
- Database string
- SQLServer database name.
- Resource string
- Resource.
- Snapshot
Mode string - schema_only|initial default initial.
- Table string
- SQLServer table; any non-system table in the monitored databases. You can use , (comma) to monitor multiple data tables, but each data table needs to be filled in the format of data database name.data table name.
- database String
- SQLServer database name.
- resource String
- Resource.
- snapshot
Mode String - schema_only|initial default initial.
- table String
- SQLServer table; any non-system table in the monitored databases. You can use , (comma) to monitor multiple data tables, but each data table needs to be filled in the format of data database name.data table name.
- database string
- SQLServer database name.
- resource string
- Resource.
- snapshot
Mode string - schema_only|initial default initial.
- table string
- SQLServer table; any non-system table in the monitored databases. You can use , (comma) to monitor multiple data tables, but each data table needs to be filled in the format of data database name.data table name.
- database str
- SQLServer database name.
- resource str
- Resource.
- snapshot_
mode str - schema_only|initial default initial.
- table str
- SQLServer table; any non-system table in the monitored databases. You can use , (comma) to monitor multiple data tables, but each data table needs to be filled in the format of data database name.data table name.
- database String
- SQLServer database name.
- resource String
- Resource.
- snapshot
Mode String - schema_only|initial default initial.
- table String
- SQLServer table; any non-system table in the monitored databases. You can use , (comma) to monitor multiple data tables, but each data table needs to be filled in the format of data database name.data table name.
GetCkafkaDatahubTaskTaskListTargetResourceTdwParam
- Bid string
- Tdw bid.
- Is
Domestic bool - default true.
- Tdw
Host string - TDW address, default tl-tdbank-tdmanager.tencent-distribute.com.
- Tdw
Port double - TDW port, default 8099.
- Tid string
- Tdw tid.
- Bid string
- Tdw bid.
- Is
Domestic bool - default true.
- Tdw
Host string - TDW address, default tl-tdbank-tdmanager.tencent-distribute.com.
- Tdw
Port float64 - TDW port, default 8099.
- Tid string
- Tdw tid.
- bid String
- Tdw bid.
- is
Domestic Boolean - default true.
- tdw
Host String - TDW address, default tl-tdbank-tdmanager.tencent-distribute.com.
- tdw
Port Double - TDW port, default 8099.
- tid String
- Tdw tid.
- bid string
- Tdw bid.
- is
Domestic boolean - default true.
- tdw
Host string - TDW address, default tl-tdbank-tdmanager.tencent-distribute.com.
- tdw
Port number - TDW port, default 8099.
- tid string
- Tdw tid.
- bid str
- Tdw bid.
- is_
domestic bool - default true.
- tdw_
host str - TDW address, default tl-tdbank-tdmanager.tencent-distribute.com.
- tdw_
port float - TDW port, default 8099.
- tid str
- Tdw tid.
- bid String
- Tdw bid.
- is
Domestic Boolean - default true.
- tdw
Host String - TDW address, default tl-tdbank-tdmanager.tencent-distribute.com.
- tdw
Port Number - TDW port, default 8099.
- tid String
- Tdw tid.
GetCkafkaDatahubTaskTaskListTargetResourceTopicParam
- Compression
Type string - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- Msg
Multiple double - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- Offset
Type string - Offset type, initial position earliest, latest position latest, time point position timestamp.
- Resource string
- Resource.
- Start
Time double - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- Topic
Id string - Topic TopicId.
- Use
Auto boolCreate Topic - whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- Compression
Type string - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- Msg
Multiple float64 - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- Offset
Type string - Offset type, initial position earliest, latest position latest, time point position timestamp.
- Resource string
- Resource.
- Start
Time float64 - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- Topic
Id string - Topic TopicId.
- Use
Auto boolCreate Topic - whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- compression
Type String - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- msg
Multiple Double - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offset
Type String - Offset type, initial position earliest, latest position latest, time point position timestamp.
- resource String
- Resource.
- start
Time Double - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- topic
Id String - Topic TopicId.
- use
Auto BooleanCreate Topic - whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- compression
Type string - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- msg
Multiple number - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offset
Type string - Offset type, initial position earliest, latest position latest, time point position timestamp.
- resource string
- Resource.
- start
Time number - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- topic
Id string - Topic TopicId.
- use
Auto booleanCreate Topic - whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- compression_
type str - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- msg_
multiple float - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offset_
type str - Offset type, initial position earliest, latest position latest, time point position timestamp.
- resource str
- Resource.
- start_
time float - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- topic_
id str - Topic TopicId.
- use_
auto_ boolcreate_ topic - whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- compression
Type String - Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- msg
Multiple Number - 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offset
Type String - Offset type, initial position earliest, latest position latest, time point position timestamp.
- resource String
- Resource.
- start
Time Number - It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- topic
Id String - Topic TopicId.
- use
Auto BooleanCreate Topic - whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
Package Details
- Repository
- tencentcloud tencentcloudstack/terraform-provider-tencentcloud
- License
- Notes
- This Pulumi package is based on the
tencentcloud
Terraform Provider.
tencentcloud 1.81.189 published on Wednesday, Apr 30, 2025 by tencentcloudstack