tencentcloud.CkafkaDatahubTask
Provides a resource to create a CKafka DataHub task.
Example Usage
TypeScript

import * as pulumi from "@pulumi/pulumi";
import * as tencentcloud from "@pulumi/tencentcloud";

const datahubTask = new tencentcloud.CkafkaDatahubTask("datahubTask", {
    sourceResource: {
        postgreSqlParam: {
            database: "postgres",
            isTableRegular: false,
            keyColumns: "",
            pluginName: "decoderbufs",
            recordWithSchema: false,
            resource: "resource-y9nxnw46",
            snapshotMode: "never",
            table: "*",
        },
        type: "POSTGRESQL",
    },
    targetResource: {
        topicParam: {
            compressionType: "none",
            resource: "1308726196-keep-topic",
            useAutoCreateTopic: false,
        },
        type: "TOPIC",
    },
    taskName: "test-task123321",
    taskType: "SOURCE",
});
Python

import pulumi
import pulumi_tencentcloud as tencentcloud

datahub_task = tencentcloud.CkafkaDatahubTask("datahubTask",
    source_resource={
        "postgre_sql_param": {
            "database": "postgres",
            "is_table_regular": False,
            "key_columns": "",
            "plugin_name": "decoderbufs",
            "record_with_schema": False,
            "resource": "resource-y9nxnw46",
            "snapshot_mode": "never",
            "table": "*",
        },
        "type": "POSTGRESQL",
    },
    target_resource={
        "topic_param": {
            "compression_type": "none",
            "resource": "1308726196-keep-topic",
            "use_auto_create_topic": False,
        },
        "type": "TOPIC",
    },
    task_name="test-task123321",
    task_type="SOURCE")
Go

package main

import (
    "github.com/pulumi/pulumi-terraform-provider/sdks/go/tencentcloud/tencentcloud"
    "github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
    pulumi.Run(func(ctx *pulumi.Context) error {
        _, err := tencentcloud.NewCkafkaDatahubTask(ctx, "datahubTask", &tencentcloud.CkafkaDatahubTaskArgs{
            SourceResource: &tencentcloud.CkafkaDatahubTaskSourceResourceArgs{
                PostgreSqlParam: &tencentcloud.CkafkaDatahubTaskSourceResourcePostgreSqlParamArgs{
                    Database:         pulumi.String("postgres"),
                    IsTableRegular:   pulumi.Bool(false),
                    KeyColumns:       pulumi.String(""),
                    PluginName:       pulumi.String("decoderbufs"),
                    RecordWithSchema: pulumi.Bool(false),
                    Resource:         pulumi.String("resource-y9nxnw46"),
                    SnapshotMode:     pulumi.String("never"),
                    Table:            pulumi.String("*"),
                },
                Type: pulumi.String("POSTGRESQL"),
            },
            TargetResource: &tencentcloud.CkafkaDatahubTaskTargetResourceArgs{
                TopicParam: &tencentcloud.CkafkaDatahubTaskTargetResourceTopicParamArgs{
                    CompressionType:    pulumi.String("none"),
                    Resource:           pulumi.String("1308726196-keep-topic"),
                    UseAutoCreateTopic: pulumi.Bool(false),
                },
                Type: pulumi.String("TOPIC"),
            },
            TaskName: pulumi.String("test-task123321"),
            TaskType: pulumi.String("SOURCE"),
        })
        if err != nil {
            return err
        }
        return nil
    })
}
C#

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Tencentcloud = Pulumi.Tencentcloud;

return await Deployment.RunAsync(() =>
{
    var datahubTask = new Tencentcloud.CkafkaDatahubTask("datahubTask", new()
    {
        SourceResource = new Tencentcloud.Inputs.CkafkaDatahubTaskSourceResourceArgs
        {
            PostgreSqlParam = new Tencentcloud.Inputs.CkafkaDatahubTaskSourceResourcePostgreSqlParamArgs
            {
                Database = "postgres",
                IsTableRegular = false,
                KeyColumns = "",
                PluginName = "decoderbufs",
                RecordWithSchema = false,
                Resource = "resource-y9nxnw46",
                SnapshotMode = "never",
                Table = "*",
            },
            Type = "POSTGRESQL",
        },
        TargetResource = new Tencentcloud.Inputs.CkafkaDatahubTaskTargetResourceArgs
        {
            TopicParam = new Tencentcloud.Inputs.CkafkaDatahubTaskTargetResourceTopicParamArgs
            {
                CompressionType = "none",
                Resource = "1308726196-keep-topic",
                UseAutoCreateTopic = false,
            },
            Type = "TOPIC",
        },
        TaskName = "test-task123321",
        TaskType = "SOURCE",
    });
});
Java

package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.tencentcloud.CkafkaDatahubTask;
import com.pulumi.tencentcloud.CkafkaDatahubTaskArgs;
import com.pulumi.tencentcloud.inputs.CkafkaDatahubTaskSourceResourceArgs;
import com.pulumi.tencentcloud.inputs.CkafkaDatahubTaskSourceResourcePostgreSqlParamArgs;
import com.pulumi.tencentcloud.inputs.CkafkaDatahubTaskTargetResourceArgs;
import com.pulumi.tencentcloud.inputs.CkafkaDatahubTaskTargetResourceTopicParamArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var datahubTask = new CkafkaDatahubTask("datahubTask", CkafkaDatahubTaskArgs.builder()
            .sourceResource(CkafkaDatahubTaskSourceResourceArgs.builder()
                .postgreSqlParam(CkafkaDatahubTaskSourceResourcePostgreSqlParamArgs.builder()
                    .database("postgres")
                    .isTableRegular(false)
                    .keyColumns("")
                    .pluginName("decoderbufs")
                    .recordWithSchema(false)
                    .resource("resource-y9nxnw46")
                    .snapshotMode("never")
                    .table("*")
                    .build())
                .type("POSTGRESQL")
                .build())
            .targetResource(CkafkaDatahubTaskTargetResourceArgs.builder()
                .topicParam(CkafkaDatahubTaskTargetResourceTopicParamArgs.builder()
                    .compressionType("none")
                    .resource("1308726196-keep-topic")
                    .useAutoCreateTopic(false)
                    .build())
                .type("TOPIC")
                .build())
            .taskName("test-task123321")
            .taskType("SOURCE")
            .build());
    }
}
YAML

resources:
  datahubTask:
    type: tencentcloud:CkafkaDatahubTask
    properties:
      sourceResource:
        postgreSqlParam:
          database: postgres
          isTableRegular: false
          keyColumns: ""
          pluginName: decoderbufs
          recordWithSchema: false
          resource: resource-y9nxnw46
          snapshotMode: never
          table: '*'
        type: POSTGRESQL
      targetResource:
        topicParam:
          compressionType: none
          resource: 1308726196-keep-topic
          useAutoCreateTopic: false
        type: TOPIC
      taskName: test-task123321
      taskType: SOURCE
Create CkafkaDatahubTask Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new CkafkaDatahubTask(name: string, args: CkafkaDatahubTaskArgs, opts?: CustomResourceOptions);
@overload
def CkafkaDatahubTask(resource_name: str,
                      args: CkafkaDatahubTaskArgs,
                      opts: Optional[ResourceOptions] = None)
@overload
def CkafkaDatahubTask(resource_name: str,
                      opts: Optional[ResourceOptions] = None,
                      task_name: Optional[str] = None,
                      task_type: Optional[str] = None,
                      ckafka_datahub_task_id: Optional[str] = None,
                      schema_id: Optional[str] = None,
                      source_resource: Optional[CkafkaDatahubTaskSourceResourceArgs] = None,
                      target_resource: Optional[CkafkaDatahubTaskTargetResourceArgs] = None,
                      transform_param: Optional[CkafkaDatahubTaskTransformParamArgs] = None,
                      transforms_param: Optional[CkafkaDatahubTaskTransformsParamArgs] = None)
func NewCkafkaDatahubTask(ctx *Context, name string, args CkafkaDatahubTaskArgs, opts ...ResourceOption) (*CkafkaDatahubTask, error)
public CkafkaDatahubTask(string name, CkafkaDatahubTaskArgs args, CustomResourceOptions? opts = null)
public CkafkaDatahubTask(String name, CkafkaDatahubTaskArgs args)
public CkafkaDatahubTask(String name, CkafkaDatahubTaskArgs args, CustomResourceOptions options)
type: tencentcloud:CkafkaDatahubTask
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args CkafkaDatahubTaskArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args CkafkaDatahubTaskArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args CkafkaDatahubTaskArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args CkafkaDatahubTaskArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args CkafkaDatahubTaskArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
CkafkaDatahubTask Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
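For example, the source_resource input from the example above can be written in either form. A minimal sketch reusing the example's placeholder IDs; both values describe the same input:

import pulumi_tencentcloud as tencentcloud

# Argument-class form: nested inputs as typed Args classes.
source_as_args = tencentcloud.CkafkaDatahubTaskSourceResourceArgs(
    type="POSTGRESQL",
    postgre_sql_param=tencentcloud.CkafkaDatahubTaskSourceResourcePostgreSqlParamArgs(
        database="postgres",
        resource="resource-y9nxnw46",
        plugin_name="decoderbufs",
        table="*",
    ),
)

# Dictionary-literal form: the same input as a plain dict.
source_as_dict = {
    "type": "POSTGRESQL",
    "postgre_sql_param": {
        "database": "postgres",
        "resource": "resource-y9nxnw46",
        "plugin_name": "decoderbufs",
        "table": "*",
    },
}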
The CkafkaDatahubTask resource accepts the following input properties:
C#
- TaskName string - name of the task.
- TaskType string - type of the task, SOURCE(data input), SINK(data output).
- CkafkaDatahubTaskId string - ID of the resource.
- SchemaId string - SchemaId.
- SourceResource CkafkaDatahubTaskSourceResource - data resource.
- TargetResource CkafkaDatahubTaskTargetResource - Target Resource.
- TransformParam CkafkaDatahubTaskTransformParam - Data Processing Rules.
- TransformsParam CkafkaDatahubTaskTransformsParam - Data processing rules.

Go
- TaskName string - name of the task.
- TaskType string - type of the task, SOURCE(data input), SINK(data output).
- CkafkaDatahubTaskId string - ID of the resource.
- SchemaId string - SchemaId.
- SourceResource CkafkaDatahubTaskSourceResourceArgs - data resource.
- TargetResource CkafkaDatahubTaskTargetResourceArgs - Target Resource.
- TransformParam CkafkaDatahubTaskTransformParamArgs - Data Processing Rules.
- TransformsParam CkafkaDatahubTaskTransformsParamArgs - Data processing rules.

Java
- taskName String - name of the task.
- taskType String - type of the task, SOURCE(data input), SINK(data output).
- ckafkaDatahubTaskId String - ID of the resource.
- schemaId String - SchemaId.
- sourceResource CkafkaDatahubTaskSourceResource - data resource.
- targetResource CkafkaDatahubTaskTargetResource - Target Resource.
- transformParam CkafkaDatahubTaskTransformParam - Data Processing Rules.
- transformsParam CkafkaDatahubTaskTransformsParam - Data processing rules.

TypeScript
- taskName string - name of the task.
- taskType string - type of the task, SOURCE(data input), SINK(data output).
- ckafkaDatahubTaskId string - ID of the resource.
- schemaId string - SchemaId.
- sourceResource CkafkaDatahubTaskSourceResource - data resource.
- targetResource CkafkaDatahubTaskTargetResource - Target Resource.
- transformParam CkafkaDatahubTaskTransformParam - Data Processing Rules.
- transformsParam CkafkaDatahubTaskTransformsParam - Data processing rules.

Python
- task_name str - name of the task.
- task_type str - type of the task, SOURCE(data input), SINK(data output).
- ckafka_datahub_task_id str - ID of the resource.
- schema_id str - SchemaId.
- source_resource CkafkaDatahubTaskSourceResourceArgs - data resource.
- target_resource CkafkaDatahubTaskTargetResourceArgs - Target Resource.
- transform_param CkafkaDatahubTaskTransformParamArgs - Data Processing Rules.
- transforms_param CkafkaDatahubTaskTransformsParamArgs - Data processing rules.

YAML
- taskName String - name of the task.
- taskType String - type of the task, SOURCE(data input), SINK(data output).
- ckafkaDatahubTaskId String - ID of the resource.
- schemaId String - SchemaId.
- sourceResource Property Map - data resource.
- targetResource Property Map - Target Resource.
- transformParam Property Map - Data Processing Rules.
- transformsParam Property Map - Data processing rules.
Outputs
All input properties are implicitly available as output properties. Additionally, the CkafkaDatahubTask resource produces the following output properties:
- Id string
- The provider-assigned unique ID for this managed resource.
- Id string
- The provider-assigned unique ID for this managed resource.
- id String
- The provider-assigned unique ID for this managed resource.
- id string
- The provider-assigned unique ID for this managed resource.
- id str
- The provider-assigned unique ID for this managed resource.
- id String
- The provider-assigned unique ID for this managed resource.
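For instance, continuing the Python example above, the provider-assigned ID can be exported as a stack output. A minimal sketch; datahub_task is the resource created earlier:

import pulumi

# `id` is only known after creation, so it is exposed as an Output.
pulumi.export("datahub_task_id", datahub_task.id)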
Look up Existing CkafkaDatahubTask Resource
Get an existing CkafkaDatahubTask resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: CkafkaDatahubTaskState, opts?: CustomResourceOptions): CkafkaDatahubTask
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
ckafka_datahub_task_id: Optional[str] = None,
schema_id: Optional[str] = None,
source_resource: Optional[CkafkaDatahubTaskSourceResourceArgs] = None,
target_resource: Optional[CkafkaDatahubTaskTargetResourceArgs] = None,
task_name: Optional[str] = None,
task_type: Optional[str] = None,
transform_param: Optional[CkafkaDatahubTaskTransformParamArgs] = None,
transforms_param: Optional[CkafkaDatahubTaskTransformsParamArgs] = None) -> CkafkaDatahubTask
func GetCkafkaDatahubTask(ctx *Context, name string, id IDInput, state *CkafkaDatahubTaskState, opts ...ResourceOption) (*CkafkaDatahubTask, error)
public static CkafkaDatahubTask Get(string name, Input<string> id, CkafkaDatahubTaskState? state, CustomResourceOptions? opts = null)
public static CkafkaDatahubTask get(String name, Output<String> id, CkafkaDatahubTaskState state, CustomResourceOptions options)
resources:
  _:
    type: tencentcloud:CkafkaDatahubTask
    get:
      id: ${id}
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
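A minimal Python lookup sketch; "task-id-placeholder" stands in for a real task ID:

import pulumi_tencentcloud as tencentcloud

# Rehydrate the state of an existing task; no new resource is created.
existing = tencentcloud.CkafkaDatahubTask.get(
    "existingDatahubTask",   # logical name for the looked-up resource
    "task-id-placeholder",   # provider ID of the task to look up
)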
C#
- CkafkaDatahubTaskId string - ID of the resource.
- SchemaId string - SchemaId.
- SourceResource CkafkaDatahubTaskSourceResource - data resource.
- TargetResource CkafkaDatahubTaskTargetResource - Target Resource.
- TaskName string - name of the task.
- TaskType string - type of the task, SOURCE(data input), SINK(data output).
- TransformParam CkafkaDatahubTaskTransformParam - Data Processing Rules.
- TransformsParam CkafkaDatahubTaskTransformsParam - Data processing rules.

Go
- CkafkaDatahubTaskId string - ID of the resource.
- SchemaId string - SchemaId.
- SourceResource CkafkaDatahubTaskSourceResourceArgs - data resource.
- TargetResource CkafkaDatahubTaskTargetResourceArgs - Target Resource.
- TaskName string - name of the task.
- TaskType string - type of the task, SOURCE(data input), SINK(data output).
- TransformParam CkafkaDatahubTaskTransformParamArgs - Data Processing Rules.
- TransformsParam CkafkaDatahubTaskTransformsParamArgs - Data processing rules.

Java
- ckafkaDatahubTaskId String - ID of the resource.
- schemaId String - SchemaId.
- sourceResource CkafkaDatahubTaskSourceResource - data resource.
- targetResource CkafkaDatahubTaskTargetResource - Target Resource.
- taskName String - name of the task.
- taskType String - type of the task, SOURCE(data input), SINK(data output).
- transformParam CkafkaDatahubTaskTransformParam - Data Processing Rules.
- transformsParam CkafkaDatahubTaskTransformsParam - Data processing rules.

TypeScript
- ckafkaDatahubTaskId string - ID of the resource.
- schemaId string - SchemaId.
- sourceResource CkafkaDatahubTaskSourceResource - data resource.
- targetResource CkafkaDatahubTaskTargetResource - Target Resource.
- taskName string - name of the task.
- taskType string - type of the task, SOURCE(data input), SINK(data output).
- transformParam CkafkaDatahubTaskTransformParam - Data Processing Rules.
- transformsParam CkafkaDatahubTaskTransformsParam - Data processing rules.

Python
- ckafka_datahub_task_id str - ID of the resource.
- schema_id str - SchemaId.
- source_resource CkafkaDatahubTaskSourceResourceArgs - data resource.
- target_resource CkafkaDatahubTaskTargetResourceArgs - Target Resource.
- task_name str - name of the task.
- task_type str - type of the task, SOURCE(data input), SINK(data output).
- transform_param CkafkaDatahubTaskTransformParamArgs - Data Processing Rules.
- transforms_param CkafkaDatahubTaskTransformsParamArgs - Data processing rules.

YAML
- ckafkaDatahubTaskId String - ID of the resource.
- schemaId String - SchemaId.
- sourceResource Property Map - data resource.
- targetResource Property Map - Target Resource.
- taskName String - name of the task.
- taskType String - type of the task, SOURCE(data input), SINK(data output).
- transformParam Property Map - Data Processing Rules.
- transformsParam Property Map - Data processing rules.
Supporting Types
CkafkaDatahubTaskSourceResource, CkafkaDatahubTaskSourceResourceArgs
C#, Go
- Type string - resource type.
- ClickHouseParam CkafkaDatahubTaskSourceResourceClickHouseParam - ClickHouse configuration, required when Type is CLICKHOUSE.
- ClsParam CkafkaDatahubTaskSourceResourceClsParam - Cls configuration, required when Type is CLS.
- CosParam CkafkaDatahubTaskSourceResourceCosParam - Cos configuration, required when Type is COS.
- CtsdbParam CkafkaDatahubTaskSourceResourceCtsdbParam - Ctsdb configuration, required when Type is CTSDB.
- DtsParam CkafkaDatahubTaskSourceResourceDtsParam - Dts configuration, required when Type is DTS.
- EsParam CkafkaDatahubTaskSourceResourceEsParam - Es configuration, required when Type is ES.
- EventBusParam CkafkaDatahubTaskSourceResourceEventBusParam - EB configuration, required when Type is EB.
- KafkaParam CkafkaDatahubTaskSourceResourceKafkaParam - ckafka configuration, required when Type is KAFKA.
- MariaDbParam CkafkaDatahubTaskSourceResourceMariaDbParam - MariaDB configuration, required when Type is MARIADB.
- MongoDbParam CkafkaDatahubTaskSourceResourceMongoDbParam - MongoDB configuration, required when Type is MONGODB.
- MySqlParam CkafkaDatahubTaskSourceResourceMySqlParam - MySQL configuration, required when Type is MYSQL.
- PostgreSqlParam CkafkaDatahubTaskSourceResourcePostgreSqlParam - PostgreSQL configuration, required when Type is POSTGRESQL or TDSQL C_POSTGRESQL.
- ScfParam CkafkaDatahubTaskSourceResourceScfParam - Scf configuration, required when Type is SCF.
- SqlServerParam CkafkaDatahubTaskSourceResourceSqlServerParam - SQLServer configuration, required when Type is SQLSERVER.
- TdwParam CkafkaDatahubTaskSourceResourceTdwParam - Tdw configuration, required when Type is TDW.
- TopicParam CkafkaDatahubTaskSourceResourceTopicParam - Topic configuration, required when Type is TOPIC.

Java
- type String - resource type.
- clickHouseParam CkafkaDatahubTaskSourceResourceClickHouseParam - ClickHouse configuration, required when Type is CLICKHOUSE.
- clsParam CkafkaDatahubTaskSourceResourceClsParam - Cls configuration, required when Type is CLS.
- cosParam CkafkaDatahubTaskSourceResourceCosParam - Cos configuration, required when Type is COS.
- ctsdbParam CkafkaDatahubTaskSourceResourceCtsdbParam - Ctsdb configuration, required when Type is CTSDB.
- dtsParam CkafkaDatahubTaskSourceResourceDtsParam - Dts configuration, required when Type is DTS.
- esParam CkafkaDatahubTaskSourceResourceEsParam - Es configuration, required when Type is ES.
- eventBusParam CkafkaDatahubTaskSourceResourceEventBusParam - EB configuration, required when Type is EB.
- kafkaParam CkafkaDatahubTaskSourceResourceKafkaParam - ckafka configuration, required when Type is KAFKA.
- mariaDbParam CkafkaDatahubTaskSourceResourceMariaDbParam - MariaDB configuration, required when Type is MARIADB.
- mongoDbParam CkafkaDatahubTaskSourceResourceMongoDbParam - MongoDB configuration, required when Type is MONGODB.
- mySqlParam CkafkaDatahubTaskSourceResourceMySqlParam - MySQL configuration, required when Type is MYSQL.
- postgreSqlParam CkafkaDatahubTaskSourceResourcePostgreSqlParam - PostgreSQL configuration, required when Type is POSTGRESQL or TDSQL C_POSTGRESQL.
- scfParam CkafkaDatahubTaskSourceResourceScfParam - Scf configuration, required when Type is SCF.
- sqlServerParam CkafkaDatahubTaskSourceResourceSqlServerParam - SQLServer configuration, required when Type is SQLSERVER.
- tdwParam CkafkaDatahubTaskSourceResourceTdwParam - Tdw configuration, required when Type is TDW.
- topicParam CkafkaDatahubTaskSourceResourceTopicParam - Topic configuration, required when Type is TOPIC.

TypeScript
- type string - resource type.
- clickHouseParam CkafkaDatahubTaskSourceResourceClickHouseParam - ClickHouse configuration, required when Type is CLICKHOUSE.
- clsParam CkafkaDatahubTaskSourceResourceClsParam - Cls configuration, required when Type is CLS.
- cosParam CkafkaDatahubTaskSourceResourceCosParam - Cos configuration, required when Type is COS.
- ctsdbParam CkafkaDatahubTaskSourceResourceCtsdbParam - Ctsdb configuration, required when Type is CTSDB.
- dtsParam CkafkaDatahubTaskSourceResourceDtsParam - Dts configuration, required when Type is DTS.
- esParam CkafkaDatahubTaskSourceResourceEsParam - Es configuration, required when Type is ES.
- eventBusParam CkafkaDatahubTaskSourceResourceEventBusParam - EB configuration, required when Type is EB.
- kafkaParam CkafkaDatahubTaskSourceResourceKafkaParam - ckafka configuration, required when Type is KAFKA.
- mariaDbParam CkafkaDatahubTaskSourceResourceMariaDbParam - MariaDB configuration, required when Type is MARIADB.
- mongoDbParam CkafkaDatahubTaskSourceResourceMongoDbParam - MongoDB configuration, required when Type is MONGODB.
- mySqlParam CkafkaDatahubTaskSourceResourceMySqlParam - MySQL configuration, required when Type is MYSQL.
- postgreSqlParam CkafkaDatahubTaskSourceResourcePostgreSqlParam - PostgreSQL configuration, required when Type is POSTGRESQL or TDSQL C_POSTGRESQL.
- scfParam CkafkaDatahubTaskSourceResourceScfParam - Scf configuration, required when Type is SCF.
- sqlServerParam CkafkaDatahubTaskSourceResourceSqlServerParam - SQLServer configuration, required when Type is SQLSERVER.
- tdwParam CkafkaDatahubTaskSourceResourceTdwParam - Tdw configuration, required when Type is TDW.
- topicParam CkafkaDatahubTaskSourceResourceTopicParam - Topic configuration, required when Type is TOPIC.

Python
- type str - resource type.
- click_house_param CkafkaDatahubTaskSourceResourceClickHouseParam - ClickHouse configuration, required when Type is CLICKHOUSE.
- cls_param CkafkaDatahubTaskSourceResourceClsParam - Cls configuration, required when Type is CLS.
- cos_param CkafkaDatahubTaskSourceResourceCosParam - Cos configuration, required when Type is COS.
- ctsdb_param CkafkaDatahubTaskSourceResourceCtsdbParam - Ctsdb configuration, required when Type is CTSDB.
- dts_param CkafkaDatahubTaskSourceResourceDtsParam - Dts configuration, required when Type is DTS.
- es_param CkafkaDatahubTaskSourceResourceEsParam - Es configuration, required when Type is ES.
- event_bus_param CkafkaDatahubTaskSourceResourceEventBusParam - EB configuration, required when Type is EB.
- kafka_param CkafkaDatahubTaskSourceResourceKafkaParam - ckafka configuration, required when Type is KAFKA.
- maria_db_param CkafkaDatahubTaskSourceResourceMariaDbParam - MariaDB configuration, required when Type is MARIADB.
- mongo_db_param CkafkaDatahubTaskSourceResourceMongoDbParam - MongoDB configuration, required when Type is MONGODB.
- my_sql_param CkafkaDatahubTaskSourceResourceMySqlParam - MySQL configuration, required when Type is MYSQL.
- postgre_sql_param CkafkaDatahubTaskSourceResourcePostgreSqlParam - PostgreSQL configuration, required when Type is POSTGRESQL or TDSQL C_POSTGRESQL.
- scf_param CkafkaDatahubTaskSourceResourceScfParam - Scf configuration, required when Type is SCF.
- sql_server_param CkafkaDatahubTaskSourceResourceSqlServerParam - SQLServer configuration, required when Type is SQLSERVER.
- tdw_param CkafkaDatahubTaskSourceResourceTdwParam - Tdw configuration, required when Type is TDW.
- topic_param CkafkaDatahubTaskSourceResourceTopicParam - Topic configuration, required when Type is TOPIC.

YAML
- type String - resource type.
- clickHouseParam Property Map - ClickHouse configuration, required when Type is CLICKHOUSE.
- clsParam Property Map - Cls configuration, required when Type is CLS.
- cosParam Property Map - Cos configuration, required when Type is COS.
- ctsdbParam Property Map - Ctsdb configuration, required when Type is CTSDB.
- dtsParam Property Map - Dts configuration, required when Type is DTS.
- esParam Property Map - Es configuration, required when Type is ES.
- eventBusParam Property Map - EB configuration, required when Type is EB.
- kafkaParam Property Map - ckafka configuration, required when Type is KAFKA.
- mariaDbParam Property Map - MariaDB configuration, required when Type is MARIADB.
- mongoDbParam Property Map - MongoDB configuration, required when Type is MONGODB.
- mySqlParam Property Map - MySQL configuration, required when Type is MYSQL.
- postgreSqlParam Property Map - PostgreSQL configuration, required when Type is POSTGRESQL or TDSQL C_POSTGRESQL.
- scfParam Property Map - Scf configuration, required when Type is SCF.
- sqlServerParam Property Map - SQLServer configuration, required when Type is SQLSERVER.
- tdwParam Property Map - Tdw configuration, required when Type is TDW.
- topicParam Property Map - Topic configuration, required when Type is TOPIC.
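Each value of type pairs with exactly one *_param block. A minimal Python sketch assuming a CTSDB source (the ctsdb_param fields are documented below; both IDs are placeholders):

import pulumi_tencentcloud as tencentcloud

# `type` selects the matching *_param block; all other *_param inputs stay unset.
source = tencentcloud.CkafkaDatahubTaskSourceResourceArgs(
    type="CTSDB",
    ctsdb_param=tencentcloud.CkafkaDatahubTaskSourceResourceCtsdbParamArgs(
        resource="ctsdb-placeholder",       # placeholder Ctsdb resource id
        ctsdb_metric="metric-placeholder",  # placeholder metric name
    ),
)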
CkafkaDatahubTaskSourceResourceClickHouseParam, CkafkaDatahubTaskSourceResourceClickHouseParamArgs
C#
- Cluster string - ClickHouse cluster.
- Database string - ClickHouse database name.
- Resource string - resource id.
- Schemas List<CkafkaDatahubTaskSourceResourceClickHouseParamSchema> - ClickHouse schema.
- Table string - ClickHouse table.
- DropCls CkafkaDatahubTaskSourceResourceClickHouseParamDropCls - When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is invalid.
- DropInvalidMessage bool - Whether ClickHouse discards the message that fails to parse, the default is true.
- Ip string - ClickHouse ip.
- Password string - ClickHouse passwd.
- Port double - ClickHouse port.
- SelfBuilt bool - Whether it is a self-built cluster.
- ServiceVip string - instance vip.
- Type string - type of table column.
- UniqVpcId string - instance vpc id.
- UserName string - ClickHouse user name.

Go
- Cluster string - ClickHouse cluster.
- Database string - ClickHouse database name.
- Resource string - resource id.
- Schemas []CkafkaDatahubTaskSourceResourceClickHouseParamSchema - ClickHouse schema.
- Table string - ClickHouse table.
- DropCls CkafkaDatahubTaskSourceResourceClickHouseParamDropCls - When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is invalid.
- DropInvalidMessage bool - Whether ClickHouse discards the message that fails to parse, the default is true.
- Ip string - ClickHouse ip.
- Password string - ClickHouse passwd.
- Port float64 - ClickHouse port.
- SelfBuilt bool - Whether it is a self-built cluster.
- ServiceVip string - instance vip.
- Type string - type of table column.
- UniqVpcId string - instance vpc id.
- UserName string - ClickHouse user name.

Java
- cluster String - ClickHouse cluster.
- database String - ClickHouse database name.
- resource String - resource id.
- schemas List<CkafkaDatahubTaskSourceResourceClickHouseParamSchema> - ClickHouse schema.
- table String - ClickHouse table.
- dropCls CkafkaDatahubTaskSourceResourceClickHouseParamDropCls - When the member parameter dropInvalidMessageToCls is set to true, the dropInvalidMessage parameter is invalid.
- dropInvalidMessage Boolean - Whether ClickHouse discards the message that fails to parse, the default is true.
- ip String - ClickHouse ip.
- password String - ClickHouse passwd.
- port Double - ClickHouse port.
- selfBuilt Boolean - Whether it is a self-built cluster.
- serviceVip String - instance vip.
- type String - type of table column.
- uniqVpcId String - instance vpc id.
- userName String - ClickHouse user name.

TypeScript
- cluster string - ClickHouse cluster.
- database string - ClickHouse database name.
- resource string - resource id.
- schemas CkafkaDatahubTaskSourceResourceClickHouseParamSchema[] - ClickHouse schema.
- table string - ClickHouse table.
- dropCls CkafkaDatahubTaskSourceResourceClickHouseParamDropCls - When the member parameter dropInvalidMessageToCls is set to true, the dropInvalidMessage parameter is invalid.
- dropInvalidMessage boolean - Whether ClickHouse discards the message that fails to parse, the default is true.
- ip string - ClickHouse ip.
- password string - ClickHouse passwd.
- port number - ClickHouse port.
- selfBuilt boolean - Whether it is a self-built cluster.
- serviceVip string - instance vip.
- type string - type of table column.
- uniqVpcId string - instance vpc id.
- userName string - ClickHouse user name.

Python
- cluster str - ClickHouse cluster.
- database str - ClickHouse database name.
- resource str - resource id.
- schemas Sequence[CkafkaDatahubTaskSourceResourceClickHouseParamSchema] - ClickHouse schema.
- table str - ClickHouse table.
- drop_cls CkafkaDatahubTaskSourceResourceClickHouseParamDropCls - When the member parameter drop_invalid_message_to_cls is set to true, the drop_invalid_message parameter is invalid.
- drop_invalid_message bool - Whether ClickHouse discards the message that fails to parse, the default is true.
- ip str - ClickHouse ip.
- password str - ClickHouse passwd.
- port float - ClickHouse port.
- self_built bool - Whether it is a self-built cluster.
- service_vip str - instance vip.
- type str - type of table column.
- uniq_vpc_id str - instance vpc id.
- user_name str - ClickHouse user name.

YAML
- cluster String - ClickHouse cluster.
- database String - ClickHouse database name.
- resource String - resource id.
- schemas List<Property Map> - ClickHouse schema.
- table String - ClickHouse table.
- dropCls Property Map - When the member parameter dropInvalidMessageToCls is set to true, the dropInvalidMessage parameter is invalid.
- dropInvalidMessage Boolean - Whether ClickHouse discards the message that fails to parse, the default is true.
- ip String - ClickHouse ip.
- password String - ClickHouse passwd.
- port Number - ClickHouse port.
- selfBuilt Boolean - Whether it is a self-built cluster.
- serviceVip String - instance vip.
- type String - type of table column.
- uniqVpcId String - instance vpc id.
- userName String - ClickHouse user name.
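A minimal Python sketch of this block as a dictionary literal; every value is a placeholder, and the nested schemas entry uses the ClickHouseParamSchema fields documented further down:

click_house_param = {
    "cluster": "default_cluster",        # placeholder ClickHouse cluster
    "database": "demo_db",               # placeholder database name
    "table": "demo_table",               # placeholder table
    "resource": "resource-placeholder",  # placeholder resource id
    "schemas": [{
        "column_name": "id",   # target column
        "json_key": "id",      # JSON key mapped to the column
        "type": "Int64",       # placeholder column type
        "allow_null": False,   # column may not be empty
    }],
    "drop_invalid_message": True,  # discard messages that fail to parse (default true)
    "self_built": False,           # cloud-managed, not self-built
}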
CkafkaDatahubTaskSourceResourceClickHouseParamDropCls, CkafkaDatahubTaskSourceResourceClickHouseParamDropClsArgs
C#, Go
- DropClsLogSet string - cls LogSet id.
- DropClsOwneruin string - account.
- DropClsRegion string - The region where the cls is delivered.
- DropClsTopicId string - cls topic.
- DropInvalidMessageToCls bool - Whether to deliver to cls.

Java
- dropClsLogSet String - cls LogSet id.
- dropClsOwneruin String - account.
- dropClsRegion String - The region where the cls is delivered.
- dropClsTopicId String - cls topic.
- dropInvalidMessageToCls Boolean - Whether to deliver to cls.

TypeScript
- dropClsLogSet string - cls LogSet id.
- dropClsOwneruin string - account.
- dropClsRegion string - The region where the cls is delivered.
- dropClsTopicId string - cls topic.
- dropInvalidMessageToCls boolean - Whether to deliver to cls.

Python
- drop_cls_log_set str - cls LogSet id.
- drop_cls_owneruin str - account.
- drop_cls_region str - The region where the cls is delivered.
- drop_cls_topic_id str - cls topic.
- drop_invalid_message_to_cls bool - Whether to deliver to cls.

YAML
- dropClsLogSet String - cls LogSet id.
- dropClsOwneruin String - account.
- dropClsRegion String - The region where the cls is delivered.
- dropClsTopicId String - cls topic.
- dropInvalidMessageToCls Boolean - Whether to deliver to cls.
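A minimal Python sketch; every ID below is a placeholder:

drop_cls = {
    "drop_invalid_message_to_cls": True,       # deliver failed messages to cls
    "drop_cls_region": "ap-guangzhou",         # placeholder delivery region
    "drop_cls_owneruin": "100000000001",       # placeholder account uin
    "drop_cls_log_set": "logset-placeholder",  # placeholder cls LogSet id
    "drop_cls_topic_id": "topic-placeholder",  # placeholder cls topic id
}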
CkafkaDatahubTaskSourceResourceClickHouseParamSchema, CkafkaDatahubTaskSourceResourceClickHouseParamSchemaArgs
C#, Go
- AllowNull bool - Whether the column item is allowed to be empty.
- ColumnName string - column name.
- JsonKey string - The json Key name corresponding to this column.
- Type string - type of table column.

Java
- allowNull Boolean - Whether the column item is allowed to be empty.
- columnName String - column name.
- jsonKey String - The json Key name corresponding to this column.
- type String - type of table column.

TypeScript
- allowNull boolean - Whether the column item is allowed to be empty.
- columnName string - column name.
- jsonKey string - The json Key name corresponding to this column.
- type string - type of table column.

Python
- allow_null bool - Whether the column item is allowed to be empty.
- column_name str - column name.
- json_key str - The json Key name corresponding to this column.
- type str - type of table column.

YAML
- allowNull Boolean - Whether the column item is allowed to be empty.
- columnName String - column name.
- jsonKey String - The json Key name corresponding to this column.
- type String - type of table column.
CkafkaDatahubTaskSourceResourceClsParam, CkafkaDatahubTaskSourceResourceClsParamArgs
C#, Go
- DecodeJson bool - Whether the produced information is in json format.
- Resource string - cls id.
- ContentKey string - Required when DecodeJson is false.
- LogSet string - LogSet id.
- TimeField string - Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.

Java
- decodeJson Boolean - Whether the produced information is in json format.
- resource String - cls id.
- contentKey String - Required when decodeJson is false.
- logSet String - LogSet id.
- timeField String - Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.

TypeScript
- decodeJson boolean - Whether the produced information is in json format.
- resource string - cls id.
- contentKey string - Required when decodeJson is false.
- logSet string - LogSet id.
- timeField string - Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.

Python
- decode_json bool - Whether the produced information is in json format.
- resource str - cls id.
- content_key str - Required when decode_json is false.
- log_set str - LogSet id.
- time_field str - Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.

YAML
- decodeJson Boolean - Whether the produced information is in json format.
- resource String - cls id.
- contentKey String - Required when decodeJson is false.
- logSet String - LogSet id.
- timeField String - Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.
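A minimal Python sketch; IDs and field names are placeholders:

cls_param = {
    "resource": "cls-placeholder",  # placeholder cls id
    "decode_json": False,           # messages are not produced as JSON
    "content_key": "content",       # required because decode_json is false
    "time_field": "ts",             # message field holding a second-level timestamp
}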
CkafkaDatahubTaskSourceResourceCosParam, CkafkaDatahubTaskSourceResourceCosParamArgs
C#
- BucketName string - cos bucket name.
- Region string - region code.
- AggregateBatchSize double - The size of aggregated messages MB.
- AggregateInterval double - time interval.
- DirectoryTimeFormat string - Partition format formatted according to strptime time.
- FormatOutputType string - The file format after message aggregation csv|json.
- ObjectKey string - ObjectKey.
- ObjectKeyPrefix string - Dumped object directory prefix.

Go
- BucketName string - cos bucket name.
- Region string - region code.
- AggregateBatchSize float64 - The size of aggregated messages MB.
- AggregateInterval float64 - time interval.
- DirectoryTimeFormat string - Partition format formatted according to strptime time.
- FormatOutputType string - The file format after message aggregation csv|json.
- ObjectKey string - ObjectKey.
- ObjectKeyPrefix string - Dumped object directory prefix.

Java
- bucketName String - cos bucket name.
- region String - region code.
- aggregateBatchSize Double - The size of aggregated messages MB.
- aggregateInterval Double - time interval.
- directoryTimeFormat String - Partition format formatted according to strptime time.
- formatOutputType String - The file format after message aggregation csv|json.
- objectKey String - ObjectKey.
- objectKeyPrefix String - Dumped object directory prefix.

TypeScript
- bucketName string - cos bucket name.
- region string - region code.
- aggregateBatchSize number - The size of aggregated messages MB.
- aggregateInterval number - time interval.
- directoryTimeFormat string - Partition format formatted according to strptime time.
- formatOutputType string - The file format after message aggregation csv|json.
- objectKey string - ObjectKey.
- objectKeyPrefix string - Dumped object directory prefix.

Python
- bucket_name str - cos bucket name.
- region str - region code.
- aggregate_batch_size float - The size of aggregated messages MB.
- aggregate_interval float - time interval.
- directory_time_format str - Partition format formatted according to strptime time.
- format_output_type str - The file format after message aggregation csv|json.
- object_key str - ObjectKey.
- object_key_prefix str - Dumped object directory prefix.

YAML
- bucketName String - cos bucket name.
- region String - region code.
- aggregateBatchSize Number - The size of aggregated messages MB.
- aggregateInterval Number - time interval.
- directoryTimeFormat String - Partition format formatted according to strptime time.
- formatOutputType String - The file format after message aggregation csv|json.
- objectKey String - ObjectKey.
- objectKeyPrefix String - Dumped object directory prefix.
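A minimal Python sketch; bucket, region, and sizes are placeholders (the interval unit is not stated above and is assumed to be seconds):

cos_param = {
    "bucket_name": "demo-bucket-placeholder",  # placeholder cos bucket
    "region": "ap-guangzhou",                  # placeholder region code
    "format_output_type": "json",              # aggregated file format: csv|json
    "aggregate_batch_size": 5,                 # aggregate up to 5 MB per object
    "aggregate_interval": 60,                  # flush interval (assumed seconds)
    "object_key_prefix": "ckafka/dump/",       # placeholder dumped-object prefix
}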
CkafkaDatahubTaskSourceResourceCtsdbParam, CkafkaDatahubTaskSourceResourceCtsdbParamArgs
C#, Go
- CtsdbMetric string - Ctsdb metric.
- Resource string - resource id.

Java
- ctsdbMetric String - Ctsdb metric.
- resource String - resource id.

TypeScript
- ctsdbMetric string - Ctsdb metric.
- resource string - resource id.

Python
- ctsdb_metric str - Ctsdb metric.
- resource str - resource id.

YAML
- ctsdbMetric String - Ctsdb metric.
- resource String - resource id.
CkafkaDatahubTaskSourceResourceDtsParam, CkafkaDatahubTaskSourceResourceDtsParamArgs
C#
- Resource string - Dts instance Id.
- GroupId string - Dts consumer group Id.
- GroupPassword string - Dts consumer group passwd.
- GroupUser string - Dts account.
- Ip string - Dts connection ip.
- Port double - Dts connection port.
- Topic string - Dts topic.
- TranSql bool - False to synchronize the original data, true to synchronize the parsed json format data, the default is true.

Go
- Resource string - Dts instance Id.
- GroupId string - Dts consumer group Id.
- GroupPassword string - Dts consumer group passwd.
- GroupUser string - Dts account.
- Ip string - Dts connection ip.
- Port float64 - Dts connection port.
- Topic string - Dts topic.
- TranSql bool - False to synchronize the original data, true to synchronize the parsed json format data, the default is true.

Java
- resource String - Dts instance Id.
- groupId String - Dts consumer group Id.
- groupPassword String - Dts consumer group passwd.
- groupUser String - Dts account.
- ip String - Dts connection ip.
- port Double - Dts connection port.
- topic String - Dts topic.
- tranSql Boolean - False to synchronize the original data, true to synchronize the parsed json format data, the default is true.

TypeScript
- resource string - Dts instance Id.
- groupId string - Dts consumer group Id.
- groupPassword string - Dts consumer group passwd.
- groupUser string - Dts account.
- ip string - Dts connection ip.
- port number - Dts connection port.
- topic string - Dts topic.
- tranSql boolean - False to synchronize the original data, true to synchronize the parsed json format data, the default is true.

Python
- resource str - Dts instance Id.
- group_id str - Dts consumer group Id.
- group_password str - Dts consumer group passwd.
- group_user str - Dts account.
- ip str - Dts connection ip.
- port float - Dts connection port.
- topic str - Dts topic.
- tran_sql bool - False to synchronize the original data, true to synchronize the parsed json format data, the default is true.

YAML
- resource String - Dts instance Id.
- groupId String - Dts consumer group Id.
- groupPassword String - Dts consumer group passwd.
- groupUser String - Dts account.
- ip String - Dts connection ip.
- port Number - Dts connection port.
- topic String - Dts topic.
- tranSql Boolean - False to synchronize the original data, true to synchronize the parsed json format data, the default is true.
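A minimal Python sketch; all connection details are placeholders:

dts_param = {
    "resource": "dts-placeholder",              # placeholder Dts instance id
    "group_id": "consumer-group-placeholder",   # placeholder consumer group id
    "group_user": "account-placeholder",        # placeholder Dts account
    "group_password": "password-placeholder",   # placeholder group password
    "ip": "10.0.0.10",                          # placeholder connection ip
    "port": 7093,                               # placeholder connection port
    "topic": "topic-placeholder",               # placeholder Dts topic
    "tran_sql": True,  # deliver parsed JSON instead of raw data (default true)
}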
CkafkaDatahubTaskSourceResourceEsParam, CkafkaDatahubTaskSourceResourceEsParamArgs
C#
- Resource string - Resource.
- ContentKey string - key for data in non-json format.
- DatabasePrimaryKey string - When the message dumped to ES is the binlog of Database, if you need to synchronize database operations, that is, fill in the primary key of the database table when adding, deleting, and modifying operations to ES.
- DateFormat string - Es date suffix.
- DocumentIdField string - The field name of the document ID value dumped into Es.
- DropCls CkafkaDatahubTaskSourceResourceEsParamDropCls - When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is invalid.
- DropDlq CkafkaDatahubTaskSourceResourceEsParamDropDlq - dead letter queue.
- DropInvalidJsonMessage bool - Whether Es discards messages in non-json format.
- DropInvalidMessage bool - Whether Es discards the message of parsing failure.
- Index string - Es index name.
- IndexType string - Es custom index name type, STRING, JSONPATH, the default is STRING.
- Password string - Es Password.
- Port double - Es connection port.
- SelfBuilt bool - Whether it is a self-built cluster.
- ServiceVip string - instance vip.
- UniqVpcId string - instance vpc id.
- UserName string - Es UserName.

Go
- Resource string - Resource.
- ContentKey string - key for data in non-json format.
- DatabasePrimaryKey string - When the message dumped to ES is the binlog of Database, if you need to synchronize database operations, that is, fill in the primary key of the database table when adding, deleting, and modifying operations to ES.
- DateFormat string - Es date suffix.
- DocumentIdField string - The field name of the document ID value dumped into Es.
- DropCls CkafkaDatahubTaskSourceResourceEsParamDropCls - When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is invalid.
- DropDlq CkafkaDatahubTaskSourceResourceEsParamDropDlq - dead letter queue.
- DropInvalidJsonMessage bool - Whether Es discards messages in non-json format.
- DropInvalidMessage bool - Whether Es discards the message of parsing failure.
- Index string - Es index name.
- IndexType string - Es custom index name type, STRING, JSONPATH, the default is STRING.
- Password string - Es Password.
- Port float64 - Es connection port.
- SelfBuilt bool - Whether it is a self-built cluster.
- ServiceVip string - instance vip.
- UniqVpcId string - instance vpc id.
- UserName string - Es UserName.

Java
- resource String - Resource.
- contentKey String - key for data in non-json format.
- databasePrimaryKey String - When the message dumped to ES is the binlog of Database, if you need to synchronize database operations, that is, fill in the primary key of the database table when adding, deleting, and modifying operations to ES.
- dateFormat String - Es date suffix.
- documentIdField String - The field name of the document ID value dumped into Es.
- dropCls CkafkaDatahubTaskSourceResourceEsParamDropCls - When the member parameter dropInvalidMessageToCls is set to true, the dropInvalidMessage parameter is invalid.
- dropDlq CkafkaDatahubTaskSourceResourceEsParamDropDlq - dead letter queue.
- dropInvalidJsonMessage Boolean - Whether Es discards messages in non-json format.
- dropInvalidMessage Boolean - Whether Es discards the message of parsing failure.
- index String - Es index name.
- indexType String - Es custom index name type, STRING, JSONPATH, the default is STRING.
- password String - Es Password.
- port Double - Es connection port.
- selfBuilt Boolean - Whether it is a self-built cluster.
- serviceVip String - instance vip.
- uniqVpcId String - instance vpc id.
- userName String - Es UserName.

TypeScript
- resource string - Resource.
- contentKey string - key for data in non-json format.
- databasePrimaryKey string - When the message dumped to ES is the binlog of Database, if you need to synchronize database operations, that is, fill in the primary key of the database table when adding, deleting, and modifying operations to ES.
- dateFormat string - Es date suffix.
- documentIdField string - The field name of the document ID value dumped into Es.
- dropCls CkafkaDatahubTaskSourceResourceEsParamDropCls - When the member parameter dropInvalidMessageToCls is set to true, the dropInvalidMessage parameter is invalid.
- dropDlq CkafkaDatahubTaskSourceResourceEsParamDropDlq - dead letter queue.
- dropInvalidJsonMessage boolean - Whether Es discards messages in non-json format.
- dropInvalidMessage boolean - Whether Es discards the message of parsing failure.
- index string - Es index name.
- indexType string - Es custom index name type, STRING, JSONPATH, the default is STRING.
- password string - Es Password.
- port number - Es connection port.
- selfBuilt boolean - Whether it is a self-built cluster.
- serviceVip string - instance vip.
- uniqVpcId string - instance vpc id.
- userName string - Es UserName.

Python
- resource str - Resource.
- content_key str - key for data in non-json format.
- database_primary_key str - When the message dumped to ES is the binlog of Database, if you need to synchronize database operations, that is, fill in the primary key of the database table when adding, deleting, and modifying operations to ES.
- date_format str - Es date suffix.
- document_id_field str - The field name of the document ID value dumped into Es.
- drop_cls CkafkaDatahubTaskSourceResourceEsParamDropCls - When the member parameter drop_invalid_message_to_cls is set to true, the drop_invalid_message parameter is invalid.
- drop_dlq CkafkaDatahubTaskSourceResourceEsParamDropDlq - dead letter queue.
- drop_invalid_json_message bool - Whether Es discards messages in non-json format.
- drop_invalid_message bool - Whether Es discards the message of parsing failure.
- index str - Es index name.
- index_type str - Es custom index name type, STRING, JSONPATH, the default is STRING.
- password str - Es Password.
- port float - Es connection port.
- self_built bool - Whether it is a self-built cluster.
- service_vip str - instance vip.
- uniq_vpc_id str - instance vpc id.
- user_name str - Es UserName.

YAML
- resource String - Resource.
- contentKey String - key for data in non-json format.
- databasePrimaryKey String - When the message dumped to ES is the binlog of Database, if you need to synchronize database operations, that is, fill in the primary key of the database table when adding, deleting, and modifying operations to ES.
- dateFormat String - Es date suffix.
- documentIdField String - The field name of the document ID value dumped into Es.
- dropCls Property Map - When the member parameter dropInvalidMessageToCls is set to true, the dropInvalidMessage parameter is invalid.
- dropDlq Property Map - dead letter queue.
- dropInvalidJsonMessage Boolean - Whether Es discards messages in non-json format.
- dropInvalidMessage Boolean - Whether Es discards the message of parsing failure.
- index String - Es index name.
- indexType String - Es custom index name type, STRING, JSONPATH, the default is STRING.
- password String - Es Password.
- port Number - Es connection port.
- selfBuilt Boolean - Whether it is a self-built cluster.
- serviceVip String - instance vip.
- uniqVpcId String - instance vpc id.
- userName String - Es UserName.
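A minimal Python sketch; connection values are placeholders:

es_param = {
    "resource": "es-placeholder",         # placeholder Es resource
    "index": "ckafka-index",              # placeholder index name
    "index_type": "STRING",               # STRING or JSONPATH (default STRING)
    "user_name": "user-placeholder",      # placeholder Es user
    "password": "password-placeholder",   # placeholder Es password
    "port": 9200,                         # placeholder connection port
    "self_built": False,                  # cloud-managed cluster
    "drop_invalid_message": True,         # discard messages that fail to parse
    "drop_invalid_json_message": True,    # discard non-JSON messages
}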
CkafkaDatahubTaskSourceResourceEsParamDropCls, CkafkaDatahubTaskSourceResourceEsParamDropClsArgs
C#, Go
- DropClsLogSet string - cls LogSet id.
- DropClsOwneruin string - account.
- DropClsRegion string - The region where the cls is delivered.
- DropClsTopicId string - cls topic.
- DropInvalidMessageToCls bool - Whether to deliver to cls.

Java
- dropClsLogSet String - cls LogSet id.
- dropClsOwneruin String - account.
- dropClsRegion String - The region where the cls is delivered.
- dropClsTopicId String - cls topic.
- dropInvalidMessageToCls Boolean - Whether to deliver to cls.

TypeScript
- dropClsLogSet string - cls LogSet id.
- dropClsOwneruin string - account.
- dropClsRegion string - The region where the cls is delivered.
- dropClsTopicId string - cls topic.
- dropInvalidMessageToCls boolean - Whether to deliver to cls.

Python
- drop_cls_log_set str - cls LogSet id.
- drop_cls_owneruin str - account.
- drop_cls_region str - The region where the cls is delivered.
- drop_cls_topic_id str - cls topic.
- drop_invalid_message_to_cls bool - Whether to deliver to cls.

YAML
- dropClsLogSet String - cls LogSet id.
- dropClsOwneruin String - account.
- dropClsRegion String - The region where the cls is delivered.
- dropClsTopicId String - cls topic.
- dropInvalidMessageToCls Boolean - Whether to deliver to cls.
CkafkaDatahubTaskSourceResourceEsParamDropDlq, CkafkaDatahubTaskSourceResourceEsParamDropDlqArgs
C#
- Type string - type, DLQ dead letter queue, IGNORE_ERROR|DROP.
- DlqType string - dlq type, CKAFKA|TOPIC.
- KafkaParam CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParam - Ckafka type dlq.
- MaxRetryAttempts double - retry times.
- RetryInterval double - retry interval.
- TopicParam CkafkaDatahubTaskSourceResourceEsParamDropDlqTopicParam - DIP Topic type dead letter queue.

Go
- Type string - type, DLQ dead letter queue, IGNORE_ERROR|DROP.
- DlqType string - dlq type, CKAFKA|TOPIC.
- KafkaParam CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParam - Ckafka type dlq.
- MaxRetryAttempts float64 - retry times.
- RetryInterval float64 - retry interval.
- TopicParam CkafkaDatahubTaskSourceResourceEsParamDropDlqTopicParam - DIP Topic type dead letter queue.

Java
- type String - type, DLQ dead letter queue, IGNORE_ERROR|DROP.
- dlqType String - dlq type, CKAFKA|TOPIC.
- kafkaParam CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParam - Ckafka type dlq.
- maxRetryAttempts Double - retry times.
- retryInterval Double - retry interval.
- topicParam CkafkaDatahubTaskSourceResourceEsParamDropDlqTopicParam - DIP Topic type dead letter queue.

TypeScript
- type string - type, DLQ dead letter queue, IGNORE_ERROR|DROP.
- dlqType string - dlq type, CKAFKA|TOPIC.
- kafkaParam CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParam - Ckafka type dlq.
- maxRetryAttempts number - retry times.
- retryInterval number - retry interval.
- topicParam CkafkaDatahubTaskSourceResourceEsParamDropDlqTopicParam - DIP Topic type dead letter queue.

Python
- type str - type, DLQ dead letter queue, IGNORE_ERROR|DROP.
- dlq_type str - dlq type, CKAFKA|TOPIC.
- kafka_param CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParam - Ckafka type dlq.
- max_retry_attempts float - retry times.
- retry_interval float - retry interval.
- topic_param CkafkaDatahubTaskSourceResourceEsParamDropDlqTopicParam - DIP Topic type dead letter queue.

YAML
- type String - type, DLQ dead letter queue, IGNORE_ERROR|DROP.
- dlqType String - dlq type, CKAFKA|TOPIC.
- kafkaParam Property Map - Ckafka type dlq.
- maxRetryAttempts Number - retry times.
- retryInterval Number - retry interval.
- topicParam Property Map - DIP Topic type dead letter queue.
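A minimal Python sketch of a CKafka-backed dead letter queue; the nested kafka_param uses fields from the KafkaParam type documented below, and all IDs are placeholders (the retry-interval unit is not stated above and is assumed to be seconds):

drop_dlq = {
    "type": "DLQ",            # DLQ dead letter queue; alternatives IGNORE_ERROR|DROP
    "dlq_type": "CKAFKA",     # dead-letter destination: CKAFKA or TOPIC
    "max_retry_attempts": 3,  # placeholder retry count
    "retry_interval": 60,     # placeholder retry interval (assumed seconds)
    "kafka_param": {
        "resource": "ckafka-placeholder",  # placeholder ckafka instance resource
        "self_built": False,               # cloud product, not self-built
        "topic": "dlq-topic-placeholder",  # placeholder dead-letter topic name
    },
}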
CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParam, CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParamArgs
- Resource string - instance resource.
- SelfBuilt bool - whether the cluster is built by yourself instead of cloud product.
- CompressionType string - Whether to compress when writing to the Topic; if it is not enabled, fill in none, if it is enabled, fill in open.
- EnableToleration bool - enable dead letter queue.
- MsgMultiple double - one source topic message is amplified into this many messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- OffsetType string - Offset type, from beginning: earliest, from latest: latest, from specific time: timestamp.
- PartitionNum double - the partition num of the topic.
- QpsLimit double - Qps (queries per second) limit.
- ResourceName string - instance name.
- StartTime double - required when the Offset type is timestamp.
- TableMappings List<CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParamTableMapping> - maps of table to topic, required when multi topic is selected.
- Topic string - Topic name.
- TopicId string - Topic ID.
- UseAutoCreateTopic bool - Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the name of the topic to be created automatically in the Topic field).
- UseTableMapping bool - whether to use multi table.
- ZoneId double - Zone ID.
- Resource string - instance resource.
- SelfBuilt bool - whether the cluster is built by yourself instead of cloud product.
- CompressionType string - Whether to compress when writing to the Topic; if it is not enabled, fill in none, if it is enabled, fill in open.
- EnableToleration bool - enable dead letter queue.
- MsgMultiple float64 - one source topic message is amplified into this many messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- OffsetType string - Offset type, from beginning: earliest, from latest: latest, from specific time: timestamp.
- PartitionNum float64 - the partition num of the topic.
- QpsLimit float64 - Qps (queries per second) limit.
- ResourceName string - instance name.
- StartTime float64 - required when the Offset type is timestamp.
- TableMappings []CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParamTableMapping - maps of table to topic, required when multi topic is selected.
- Topic string - Topic name.
- TopicId string - Topic ID.
- UseAutoCreateTopic bool - Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the name of the topic to be created automatically in the Topic field).
- UseTableMapping bool - whether to use multi table.
- ZoneId float64 - Zone ID.
- resource String - instance resource.
- selfBuilt Boolean - whether the cluster is built by yourself instead of cloud product.
- compressionType String - Whether to compress when writing to the Topic; if it is not enabled, fill in none, if it is enabled, fill in open.
- enableToleration Boolean - enable dead letter queue.
- msgMultiple Double - one source topic message is amplified into this many messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- offsetType String - Offset type, from beginning: earliest, from latest: latest, from specific time: timestamp.
- partitionNum Double - the partition num of the topic.
- qpsLimit Double - Qps (queries per second) limit.
- resourceName String - instance name.
- startTime Double - required when the Offset type is timestamp.
- tableMappings List<CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParamTableMapping> - maps of table to topic, required when multi topic is selected.
- topic String - Topic name.
- topicId String - Topic ID.
- useAutoCreateTopic Boolean - Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the name of the topic to be created automatically in the Topic field).
- useTableMapping Boolean - whether to use multi table.
- zoneId Double - Zone ID.
- resource string - instance resource.
- selfBuilt boolean - whether the cluster is built by yourself instead of cloud product.
- compressionType string - Whether to compress when writing to the Topic; if it is not enabled, fill in none, if it is enabled, fill in open.
- enableToleration boolean - enable dead letter queue.
- msgMultiple number - one source topic message is amplified into this many messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- offsetType string - Offset type, from beginning: earliest, from latest: latest, from specific time: timestamp.
- partitionNum number - the partition num of the topic.
- qpsLimit number - Qps (queries per second) limit.
- resourceName string - instance name.
- startTime number - required when the Offset type is timestamp.
- tableMappings CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParamTableMapping[] - maps of table to topic, required when multi topic is selected.
- topic string - Topic name.
- topicId string - Topic ID.
- useAutoCreateTopic boolean - Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the name of the topic to be created automatically in the Topic field).
- useTableMapping boolean - whether to use multi table.
- zoneId number - Zone ID.
- resource str - instance resource.
- self_built bool - whether the cluster is built by yourself instead of cloud product.
- compression_type str - Whether to compress when writing to the Topic; if it is not enabled, fill in none, if it is enabled, fill in open.
- enable_toleration bool - enable dead letter queue.
- msg_multiple float - one source topic message is amplified into this many messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- offset_type str - Offset type, from beginning: earliest, from latest: latest, from specific time: timestamp.
- partition_num float - the partition num of the topic.
- qps_limit float - Qps (queries per second) limit.
- resource_name str - instance name.
- start_time float - required when the Offset type is timestamp.
- table_mappings Sequence[CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParamTableMapping] - maps of table to topic, required when multi topic is selected.
- topic str - Topic name.
- topic_id str - Topic ID.
- use_auto_create_topic bool - Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the name of the topic to be created automatically in the Topic field).
- use_table_mapping bool - whether to use multi table.
- zone_id float - Zone ID.
- resource String - instance resource.
- selfBuilt Boolean - whether the cluster is built by yourself instead of cloud product.
- compressionType String - Whether to compress when writing to the Topic; if it is not enabled, fill in none, if it is enabled, fill in open.
- enableToleration Boolean - enable dead letter queue.
- msgMultiple Number - one source topic message is amplified into this many messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- offsetType String - Offset type, from beginning: earliest, from latest: latest, from specific time: timestamp.
- partitionNum Number - the partition num of the topic.
- qpsLimit Number - Qps (queries per second) limit.
- resourceName String - instance name.
- startTime Number - required when the Offset type is timestamp.
- tableMappings List<Property Map> - maps of table to topic, required when multi topic is selected.
- topic String - Topic name.
- topicId String - Topic ID.
- useAutoCreateTopic Boolean - Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the name of the topic to be created automatically in the Topic field).
- useTableMapping Boolean - whether to use multi table.
- zoneId Number - Zone ID.
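Reading the fields above together, a hedged TypeScript sketch of a DropDlqKafkaParam that replays from a point in time might look like this; every identifier is a placeholder.

// Sketch only: a KafkaParam dead letter queue that starts from a specific timestamp.
const dlqKafkaParam = {
    resource: "ckafka-xxxxxxxx", // hypothetical instance resource
    selfBuilt: false,            // cloud product instance, not self-built
    topic: "dead-letter-topic",  // hypothetical topic name
    offsetType: "timestamp",     // earliest | latest | timestamp
    startTime: 1700000000,       // required when offsetType is "timestamp"
    qpsLimit: 1000,              // queries-per-second limit
};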
CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParamTableMapping, CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParamTableMappingArgs
CkafkaDatahubTaskSourceResourceEsParamDropDlqTopicParam, CkafkaDatahubTaskSourceResourceEsParamDropDlqTopicParamArgs
- Resource string - The topic name of the topic sold separately.
- CompressionType string - Whether to perform compression when writing a topic; if it is not enabled, fill in none, if it is enabled, choose one of gzip, snappy, lz4.
- MsgMultiple double - one source topic message is amplified into this many messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- OffsetType string - Offset type, initial position earliest, latest position latest, time point position timestamp.
- StartTime double - Must be passed when the Offset type is timestamp; a timestamp, accurate to the second.
- TopicId string - Topic TopicId.
- UseAutoCreateTopic bool - whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- Resource string - The topic name of the topic sold separately.
- CompressionType string - Whether to perform compression when writing a topic; if it is not enabled, fill in none, if it is enabled, choose one of gzip, snappy, lz4.
- MsgMultiple float64 - one source topic message is amplified into this many messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- OffsetType string - Offset type, initial position earliest, latest position latest, time point position timestamp.
- StartTime float64 - Must be passed when the Offset type is timestamp; a timestamp, accurate to the second.
- TopicId string - Topic TopicId.
- UseAutoCreateTopic bool - whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- resource String - The topic name of the topic sold separately.
- compressionType String - Whether to perform compression when writing a topic; if it is not enabled, fill in none, if it is enabled, choose one of gzip, snappy, lz4.
- msgMultiple Double - one source topic message is amplified into this many messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- offsetType String - Offset type, initial position earliest, latest position latest, time point position timestamp.
- startTime Double - Must be passed when the Offset type is timestamp; a timestamp, accurate to the second.
- topicId String - Topic TopicId.
- useAutoCreateTopic Boolean - whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- resource string - The topic name of the topic sold separately.
- compressionType string - Whether to perform compression when writing a topic; if it is not enabled, fill in none, if it is enabled, choose one of gzip, snappy, lz4.
- msgMultiple number - one source topic message is amplified into this many messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- offsetType string - Offset type, initial position earliest, latest position latest, time point position timestamp.
- startTime number - Must be passed when the Offset type is timestamp; a timestamp, accurate to the second.
- topicId string - Topic TopicId.
- useAutoCreateTopic boolean - whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- resource str - The topic name of the topic sold separately.
- compression_type str - Whether to perform compression when writing a topic; if it is not enabled, fill in none, if it is enabled, choose one of gzip, snappy, lz4.
- msg_multiple float - one source topic message is amplified into this many messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- offset_type str - Offset type, initial position earliest, latest position latest, time point position timestamp.
- start_time float - Must be passed when the Offset type is timestamp; a timestamp, accurate to the second.
- topic_id str - Topic TopicId.
- use_auto_create_topic bool - whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- resource String - The topic name of the topic sold separately.
- compressionType String - Whether to perform compression when writing a topic; if it is not enabled, fill in none, if it is enabled, choose one of gzip, snappy, lz4.
- msgMultiple Number - one source topic message is amplified into this many messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- offsetType String - Offset type, initial position earliest, latest position latest, time point position timestamp.
- startTime Number - Must be passed when the Offset type is timestamp; a timestamp, accurate to the second.
- topicId String - Topic TopicId.
- useAutoCreateTopic Boolean - whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
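For the TOPIC-type dead letter queue, a minimal TypeScript sketch follows; the topic name is a hypothetical placeholder.

// Sketch only: a TopicParam dead letter queue.
const dlqTopicParam = {
    resource: "dead-letter-topic", // hypothetical topic name
    compressionType: "lz4",        // none, or one of gzip, snappy, lz4
    offsetType: "earliest",        // earliest | latest | timestamp
    useAutoCreateTopic: false,
};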
CkafkaDatahubTaskSourceResourceEventBusParam, CkafkaDatahubTaskSourceResourceEventBusParamArgs
- resource str - instance id.
- self_built bool - Whether it is a self-built cluster.
- type str - resource type. EB_COS/EB_ES/EB_CLS.
- function_name str - SCF function name.
- namespace str - SCF namespace.
- qualifier str - SCF version and alias.
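A hedged TypeScript sketch of an EventBusParam pointing at an SCF function; the resource id, function name, namespace, and qualifier values are all placeholders.

// Sketch only: an EventBusParam targeting SCF.
const eventBusParam = {
    resource: "eb-xxxxxxxx",    // hypothetical instance id
    selfBuilt: false,
    type: "EB_COS",             // EB_COS | EB_ES | EB_CLS
    functionName: "my-handler", // hypothetical SCF function name
    namespace: "default",       // hypothetical SCF namespace
    qualifier: "$LATEST",       // SCF version or alias (assumed value)
};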
CkafkaDatahubTaskSourceResourceKafkaParam, CkafkaDatahubTaskSourceResourceKafkaParamArgs
- Resource string - instance resource.
- SelfBuilt bool - whether the cluster is built by yourself instead of cloud product.
- CompressionType string - Whether to compress when writing to the Topic; if it is not enabled, fill in none, if it is enabled, fill in open.
- EnableToleration bool - enable dead letter queue.
- MsgMultiple double - one source topic message is amplified into this many messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- OffsetType string - Offset type, from beginning: earliest, from latest: latest, from specific time: timestamp.
- PartitionNum double - the partition num of the topic.
- QpsLimit double - Qps (queries per second) limit.
- ResourceName string - instance name.
- StartTime double - required when the Offset type is timestamp.
- TableMappings List<CkafkaDatahubTaskSourceResourceKafkaParamTableMapping> - maps of table to topic, required when multi topic is selected.
- Topic string - Topic name.
- TopicId string - Topic ID.
- UseAutoCreateTopic bool - Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the name of the topic to be created automatically in the Topic field).
- UseTableMapping bool - whether to use multi table.
- ZoneId double - Zone ID.
- Resource string - instance resource.
- SelfBuilt bool - whether the cluster is built by yourself instead of cloud product.
- CompressionType string - Whether to compress when writing to the Topic; if it is not enabled, fill in none, if it is enabled, fill in open.
- EnableToleration bool - enable dead letter queue.
- MsgMultiple float64 - one source topic message is amplified into this many messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- OffsetType string - Offset type, from beginning: earliest, from latest: latest, from specific time: timestamp.
- PartitionNum float64 - the partition num of the topic.
- QpsLimit float64 - Qps (queries per second) limit.
- ResourceName string - instance name.
- StartTime float64 - required when the Offset type is timestamp.
- TableMappings []CkafkaDatahubTaskSourceResourceKafkaParamTableMapping - maps of table to topic, required when multi topic is selected.
- Topic string - Topic name.
- TopicId string - Topic ID.
- UseAutoCreateTopic bool - Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the name of the topic to be created automatically in the Topic field).
- UseTableMapping bool - whether to use multi table.
- ZoneId float64 - Zone ID.
- resource String - instance resource.
- selfBuilt Boolean - whether the cluster is built by yourself instead of cloud product.
- compressionType String - Whether to compress when writing to the Topic; if it is not enabled, fill in none, if it is enabled, fill in open.
- enableToleration Boolean - enable dead letter queue.
- msgMultiple Double - one source topic message is amplified into this many messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- offsetType String - Offset type, from beginning: earliest, from latest: latest, from specific time: timestamp.
- partitionNum Double - the partition num of the topic.
- qpsLimit Double - Qps (queries per second) limit.
- resourceName String - instance name.
- startTime Double - required when the Offset type is timestamp.
- tableMappings List<CkafkaDatahubTaskSourceResourceKafkaParamTableMapping> - maps of table to topic, required when multi topic is selected.
- topic String - Topic name.
- topicId String - Topic ID.
- useAutoCreateTopic Boolean - Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the name of the topic to be created automatically in the Topic field).
- useTableMapping Boolean - whether to use multi table.
- zoneId Double - Zone ID.
- resource string - instance resource.
- selfBuilt boolean - whether the cluster is built by yourself instead of cloud product.
- compressionType string - Whether to compress when writing to the Topic; if it is not enabled, fill in none, if it is enabled, fill in open.
- enableToleration boolean - enable dead letter queue.
- msgMultiple number - one source topic message is amplified into this many messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- offsetType string - Offset type, from beginning: earliest, from latest: latest, from specific time: timestamp.
- partitionNum number - the partition num of the topic.
- qpsLimit number - Qps (queries per second) limit.
- resourceName string - instance name.
- startTime number - required when the Offset type is timestamp.
- tableMappings CkafkaDatahubTaskSourceResourceKafkaParamTableMapping[] - maps of table to topic, required when multi topic is selected.
- topic string - Topic name.
- topicId string - Topic ID.
- useAutoCreateTopic boolean - Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the name of the topic to be created automatically in the Topic field).
- useTableMapping boolean - whether to use multi table.
- zoneId number - Zone ID.
- resource str - instance resource.
- self_built bool - whether the cluster is built by yourself instead of cloud product.
- compression_type str - Whether to compress when writing to the Topic; if it is not enabled, fill in none, if it is enabled, fill in open.
- enable_toleration bool - enable dead letter queue.
- msg_multiple float - one source topic message is amplified into this many messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- offset_type str - Offset type, from beginning: earliest, from latest: latest, from specific time: timestamp.
- partition_num float - the partition num of the topic.
- qps_limit float - Qps (queries per second) limit.
- resource_name str - instance name.
- start_time float - required when the Offset type is timestamp.
- table_mappings Sequence[CkafkaDatahubTaskSourceResourceKafkaParamTableMapping] - maps of table to topic, required when multi topic is selected.
- topic str - Topic name.
- topic_id str - Topic ID.
- use_auto_create_topic bool - Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the name of the topic to be created automatically in the Topic field).
- use_table_mapping bool - whether to use multi table.
- zone_id float - Zone ID.
- resource String - instance resource.
- selfBuilt Boolean - whether the cluster is built by yourself instead of cloud product.
- compressionType String - Whether to compress when writing to the Topic; if it is not enabled, fill in none, if it is enabled, fill in open.
- enableToleration Boolean - enable dead letter queue.
- msgMultiple Number - one source topic message is amplified into this many messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- offsetType String - Offset type, from beginning: earliest, from latest: latest, from specific time: timestamp.
- partitionNum Number - the partition num of the topic.
- qpsLimit Number - Qps (queries per second) limit.
- resourceName String - instance name.
- startTime Number - required when the Offset type is timestamp.
- tableMappings List<Property Map> - maps of table to topic, required when multi topic is selected.
- topic String - Topic name.
- topicId String - Topic ID.
- useAutoCreateTopic Boolean - Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the name of the topic to be created automatically in the Topic field).
- useTableMapping Boolean - whether to use multi table.
- zoneId Number - Zone ID.
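As a reading of the KafkaParam fields, here is a TypeScript sketch of a source-side block that relies on automatic topic creation; identifiers are placeholders.

// Sketch only: a source KafkaParam using automatic topic creation.
const kafkaParam = {
    resource: "ckafka-xxxxxxxx", // hypothetical instance resource
    selfBuilt: false,
    topic: "inflow-topic",       // with useAutoCreateTopic, the name of the topic to create
    useAutoCreateTopic: true,    // only supported for SOURCE inflow tasks
    offsetType: "latest",
    partitionNum: 3,
};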
CkafkaDatahubTaskSourceResourceKafkaParamTableMapping, CkafkaDatahubTaskSourceResourceKafkaParamTableMappingArgs
CkafkaDatahubTaskSourceResourceMariaDbParam, CkafkaDatahubTaskSourceResourceMariaDbParamArgs
- Database string - MariaDB database name, * for all databases.
- Resource string - MariaDB connection Id.
- Table string - MariaDB table name; * means all non-system tables in the monitored databases. Use , (comma) to monitor multiple tables; each table must be written in the format database name.table name.
- IncludeContentChanges string - If the value is all, both DDL and DML data will be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- IncludeQuery bool - If the value is true and the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
- IsTablePrefix bool - When the Table input is a prefix, the value of this item is true, otherwise it is false.
- KeyColumns string - Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables not specified default to the table's primary key.
- OutputFormat string - output format, DEFAULT, CANAL_1, CANAL_2.
- RecordWithSchema bool - If the value is true, the message will carry the schema corresponding to the message structure; if false, it will not.
- SnapshotMode string - schema_only|initial, default initial.
- Database string - MariaDB database name, * for all databases.
- Resource string - MariaDB connection Id.
- Table string - MariaDB table name; * means all non-system tables in the monitored databases. Use , (comma) to monitor multiple tables; each table must be written in the format database name.table name.
- IncludeContentChanges string - If the value is all, both DDL and DML data will be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- IncludeQuery bool - If the value is true and the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
- IsTablePrefix bool - When the Table input is a prefix, the value of this item is true, otherwise it is false.
- KeyColumns string - Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables not specified default to the table's primary key.
- OutputFormat string - output format, DEFAULT, CANAL_1, CANAL_2.
- RecordWithSchema bool - If the value is true, the message will carry the schema corresponding to the message structure; if false, it will not.
- SnapshotMode string - schema_only|initial, default initial.
- database String - MariaDB database name, * for all databases.
- resource String - MariaDB connection Id.
- table String - MariaDB table name; * means all non-system tables in the monitored databases. Use , (comma) to monitor multiple tables; each table must be written in the format database name.table name.
- includeContentChanges String - If the value is all, both DDL and DML data will be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- includeQuery Boolean - If the value is true and the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
- isTablePrefix Boolean - When the Table input is a prefix, the value of this item is true, otherwise it is false.
- keyColumns String - Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables not specified default to the table's primary key.
- outputFormat String - output format, DEFAULT, CANAL_1, CANAL_2.
- recordWithSchema Boolean - If the value is true, the message will carry the schema corresponding to the message structure; if false, it will not.
- snapshotMode String - schema_only|initial, default initial.
- database string - MariaDB database name, * for all databases.
- resource string - MariaDB connection Id.
- table string - MariaDB table name; * means all non-system tables in the monitored databases. Use , (comma) to monitor multiple tables; each table must be written in the format database name.table name.
- includeContentChanges string - If the value is all, both DDL and DML data will be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- includeQuery boolean - If the value is true and the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
- isTablePrefix boolean - When the Table input is a prefix, the value of this item is true, otherwise it is false.
- keyColumns string - Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables not specified default to the table's primary key.
- outputFormat string - output format, DEFAULT, CANAL_1, CANAL_2.
- recordWithSchema boolean - If the value is true, the message will carry the schema corresponding to the message structure; if false, it will not.
- snapshotMode string - schema_only|initial, default initial.
- database str - MariaDB database name, * for all databases.
- resource str - MariaDB connection Id.
- table str - MariaDB table name; * means all non-system tables in the monitored databases. Use , (comma) to monitor multiple tables; each table must be written in the format database name.table name.
- include_content_changes str - If the value is all, both DDL and DML data will be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- include_query bool - If the value is true and the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
- is_table_prefix bool - When the Table input is a prefix, the value of this item is true, otherwise it is false.
- key_columns str - Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables not specified default to the table's primary key.
- output_format str - output format, DEFAULT, CANAL_1, CANAL_2.
- record_with_schema bool - If the value is true, the message will carry the schema corresponding to the message structure; if false, it will not.
- snapshot_mode str - schema_only|initial, default initial.
- database String - MariaDB database name, * for all databases.
- resource String - MariaDB connection Id.
- table String - MariaDB table name; * means all non-system tables in the monitored databases. Use , (comma) to monitor multiple tables; each table must be written in the format database name.table name.
- includeContentChanges String - If the value is all, both DDL and DML data will be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- includeQuery Boolean - If the value is true and the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
- isTablePrefix Boolean - When the Table input is a prefix, the value of this item is true, otherwise it is false.
- keyColumns String - Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables not specified default to the table's primary key.
- outputFormat String - output format, DEFAULT, CANAL_1, CANAL_2.
- recordWithSchema Boolean - If the value is true, the message will carry the schema corresponding to the message structure; if false, it will not.
- snapshotMode String - schema_only|initial, default initial.
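By analogy with the PostgreSQL example at the top of this page, a hedged TypeScript sketch of a MariaDB inflow task might look as follows; the MARIADB type tag is assumed from the pattern of the other source types, and all ids and names are placeholders.

import * as tencentcloud from "@pulumi/tencentcloud";

// Sketch only: MariaDB -> CKafka topic inflow task with placeholder identifiers.
const mariaDbTask = new tencentcloud.CkafkaDatahubTask("mariaDbTask", {
    taskName: "mariadb-inflow",
    taskType: "SOURCE",
    sourceResource: {
        type: "MARIADB",                   // assumed type tag for MariaDB sources
        mariaDbParam: {
            resource: "resource-xxxxxxxx", // hypothetical MariaDB connection id
            database: "appdb",
            table: "appdb.orders",         // database name.table name format
            keyColumns: "",                // empty: default to each table's primary key
            outputFormat: "DEFAULT",
            snapshotMode: "initial",
        },
    },
    targetResource: {
        type: "TOPIC",
        topicParam: {
            resource: "orders-topic",      // hypothetical topic name
            compressionType: "none",
            useAutoCreateTopic: false,
        },
    },
});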
CkafkaDatahubTaskSourceResourceMongoDbParam, CkafkaDatahubTaskSourceResourceMongoDbParamArgs
- Collection string - MongoDB collection.
- CopyExisting bool - Whether to copy the stock data; the default is true.
- Database string - MongoDB database name.
- Resource string - resource id.
- Ip string - MongoDB connection ip.
- ListeningEvent string - Listening event type; if empty, all are selected. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename; multiple types are separated by commas.
- Password string - MongoDB database password.
- Pipeline string - aggregation pipeline.
- Port double - MongoDB connection port.
- ReadPreference string - Master-slave priority, default master node.
- SelfBuilt bool - Whether it is a self-built cluster.
- UserName string - MongoDB database user name.
- Collection string - MongoDB collection.
- CopyExisting bool - Whether to copy the stock data; the default is true.
- Database string - MongoDB database name.
- Resource string - resource id.
- Ip string - MongoDB connection ip.
- ListeningEvent string - Listening event type; if empty, all are selected. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename; multiple types are separated by commas.
- Password string - MongoDB database password.
- Pipeline string - aggregation pipeline.
- Port float64 - MongoDB connection port.
- ReadPreference string - Master-slave priority, default master node.
- SelfBuilt bool - Whether it is a self-built cluster.
- UserName string - MongoDB database user name.
- collection String - MongoDB collection.
- copyExisting Boolean - Whether to copy the stock data; the default is true.
- database String - MongoDB database name.
- resource String - resource id.
- ip String - MongoDB connection ip.
- listeningEvent String - Listening event type; if empty, all are selected. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename; multiple types are separated by commas.
- password String - MongoDB database password.
- pipeline String - aggregation pipeline.
- port Double - MongoDB connection port.
- readPreference String - Master-slave priority, default master node.
- selfBuilt Boolean - Whether it is a self-built cluster.
- userName String - MongoDB database user name.
- collection string - MongoDB collection.
- copyExisting boolean - Whether to copy the stock data; the default is true.
- database string - MongoDB database name.
- resource string - resource id.
- ip string - MongoDB connection ip.
- listeningEvent string - Listening event type; if empty, all are selected. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename; multiple types are separated by commas.
- password string - MongoDB database password.
- pipeline string - aggregation pipeline.
- port number - MongoDB connection port.
- readPreference string - Master-slave priority, default master node.
- selfBuilt boolean - Whether it is a self-built cluster.
- userName string - MongoDB database user name.
- collection str - MongoDB collection.
- copy_existing bool - Whether to copy the stock data; the default is true.
- database str - MongoDB database name.
- resource str - resource id.
- ip str - MongoDB connection ip.
- listening_event str - Listening event type; if empty, all are selected. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename; multiple types are separated by commas.
- password str - MongoDB database password.
- pipeline str - aggregation pipeline.
- port float - MongoDB connection port.
- read_preference str - Master-slave priority, default master node.
- self_built bool - Whether it is a self-built cluster.
- user_name str - MongoDB database user name.
- collection String - MongoDB collection.
- copyExisting Boolean - Whether to copy the stock data; the default is true.
- database String - MongoDB database name.
- resource String - resource id.
- ip String - MongoDB connection ip.
- listeningEvent String - Listening event type; if empty, all are selected. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename; multiple types are separated by commas.
- password String - MongoDB database password.
- pipeline String - aggregation pipeline.
- port Number - MongoDB connection port.
- readPreference String - Master-slave priority, default master node.
- selfBuilt Boolean - Whether it is a self-built cluster.
- userName String - MongoDB database user name.
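A minimal TypeScript sketch of a MongoDbParam block; connection details are placeholders, and only fields documented above are used.

// Sketch only: a MongoDbParam watching insert/update/delete events.
const mongoDbParam = {
    resource: "resource-xxxxxxxx",          // hypothetical resource id
    database: "appdb",
    collection: "events",
    copyExisting: true,                     // also copy stock data (default true)
    listeningEvent: "insert,update,delete", // empty means all event types
    selfBuilt: false,
};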
CkafkaDatahubTaskSourceResourceMySqlParam, CkafkaDatahubTaskSourceResourceMySqlParamArgs
- Database string - MySQL database name, * is the whole database.
- Resource string - MySQL connection Id.
- Table string - The name of the MySQL data table; * means all non-system tables in the monitored databases. Use , (comma) to monitor multiple tables; each table must be written in the format database name.table name, and a regular expression uses the same database name.table name format.
- DataSourceIncrementColumn string - the name of the column to be monitored.
- DataSourceIncrementMode string - TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
- DataSourceMonitorMode string - TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
- DataSourceMonitorResource string - When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
- DataSourceStartFrom string - HEAD means copy stock + incremental data, TAIL means copy only incremental data.
- DataTargetInsertMode string - INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- DataTargetPrimaryKeyField string - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- DataTargetRecordMappings List<CkafkaDatahubTaskSourceResourceMySqlParamDataTargetRecordMapping> - Mapping relationship between tables and messages.
- DdlTopic string - The Topic that stores the DDL information of MySQL; if empty, it will not be stored by default.
- DropCls CkafkaDatahubTaskSourceResourceMySqlParamDropCls - When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is invalid.
- DropInvalidMessage bool - Whether to discard messages that fail to parse; the default is true.
- IncludeContentChanges string - If the value is all, both DDL and DML data will be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- IncludeQuery bool - If the value is true and the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
- IsTablePrefix bool - When the Table input is a prefix, the value of this item is true, otherwise it is false.
- IsTableRegular bool - Whether the input table is a regular expression; if this option and IsTablePrefix are both true, this option takes priority over IsTablePrefix.
- KeyColumns string - Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables not specified default to the table's primary key.
- OutputFormat string - output format, DEFAULT, CANAL_1, CANAL_2.
- RecordWithSchema bool - If the value is true, the message will carry the schema corresponding to the message structure; if false, it will not.
- SignalDatabase string - database name of signal table.
- SnapshotMode string - whether to copy inventory information (schema_only does not copy, initial copies the full amount); the default is initial.
- TopicRegex string - Regular expression for routing events to specific topics, defaults to (.*).
- TopicReplacement string - TopicRegex replacement, $1, $2.
- Database string - MySQL database name, * is the whole database.
- Resource string - MySQL connection Id.
- Table string - The name of the MySQL data table; * means all non-system tables in the monitored databases. Use , (comma) to monitor multiple tables; each table must be written in the format database name.table name, and a regular expression uses the same database name.table name format.
- DataSourceIncrementColumn string - the name of the column to be monitored.
- DataSourceIncrementMode string - TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
- DataSourceMonitorMode string - TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
- DataSourceMonitorResource string - When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
- DataSourceStartFrom string - HEAD means copy stock + incremental data, TAIL means copy only incremental data.
- DataTargetInsertMode string - INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- DataTargetPrimaryKeyField string - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- DataTargetRecordMappings []CkafkaDatahubTaskSourceResourceMySqlParamDataTargetRecordMapping - Mapping relationship between tables and messages.
- DdlTopic string - The Topic that stores the DDL information of MySQL; if empty, it will not be stored by default.
- DropCls CkafkaDatahubTaskSourceResourceMySqlParamDropCls - When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is invalid.
- DropInvalidMessage bool - Whether to discard messages that fail to parse; the default is true.
- IncludeContentChanges string - If the value is all, both DDL and DML data will be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- IncludeQuery bool - If the value is true and the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
- IsTablePrefix bool - When the Table input is a prefix, the value of this item is true, otherwise it is false.
- IsTableRegular bool - Whether the input table is a regular expression; if this option and IsTablePrefix are both true, this option takes priority over IsTablePrefix.
- KeyColumns string - Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables not specified default to the table's primary key.
- OutputFormat string - output format, DEFAULT, CANAL_1, CANAL_2.
- RecordWithSchema bool - If the value is true, the message will carry the schema corresponding to the message structure; if false, it will not.
- SignalDatabase string - database name of signal table.
- SnapshotMode string - whether to copy inventory information (schema_only does not copy, initial copies the full amount); the default is initial.
- TopicRegex string - Regular expression for routing events to specific topics, defaults to (.*).
- TopicReplacement string - TopicRegex replacement, $1, $2.
- database String - MySQL database name, * is the whole database.
- resource String - MySQL connection Id.
- table String - The name of the MySQL data table; * means all non-system tables in the monitored databases. Use , (comma) to monitor multiple tables; each table must be written in the format database name.table name, and a regular expression uses the same database name.table name format.
- dataSourceIncrementColumn String - the name of the column to be monitored.
- dataSourceIncrementMode String - TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
- dataSourceMonitorMode String - TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
- dataSourceMonitorResource String - When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
- dataSourceStartFrom String - HEAD means copy stock + incremental data, TAIL means copy only incremental data.
- dataTargetInsertMode String - INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- dataTargetPrimaryKeyField String - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- dataTargetRecordMappings List<CkafkaDatahubTaskSourceResourceMySqlParamDataTargetRecordMapping> - Mapping relationship between tables and messages.
- ddlTopic String - The Topic that stores the DDL information of MySQL; if empty, it will not be stored by default.
- dropCls CkafkaDatahubTaskSourceResourceMySqlParamDropCls - When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is invalid.
- dropInvalidMessage Boolean - Whether to discard messages that fail to parse; the default is true.
- includeContentChanges String - If the value is all, both DDL and DML data will be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- includeQuery Boolean - If the value is true and the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
- isTablePrefix Boolean - When the Table input is a prefix, the value of this item is true, otherwise it is false.
- isTableRegular Boolean - Whether the input table is a regular expression; if this option and IsTablePrefix are both true, this option takes priority over IsTablePrefix.
- keyColumns String - Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables not specified default to the table's primary key.
- outputFormat String - output format, DEFAULT, CANAL_1, CANAL_2.
- recordWithSchema Boolean - If the value is true, the message will carry the schema corresponding to the message structure; if false, it will not.
- signalDatabase String - database name of signal table.
- snapshotMode String - whether to copy inventory information (schema_only does not copy, initial copies the full amount); the default is initial.
- topicRegex String - Regular expression for routing events to specific topics, defaults to (.*).
- topicReplacement String - TopicRegex replacement, $1, $2.
- database string - MySQL database name, * is the whole database.
- resource string - MySQL connection Id.
- table string - The name of the MySQL data table; * means all non-system tables in the monitored databases. Use , (comma) to monitor multiple tables; each table must be written in the format database name.table name, and a regular expression uses the same database name.table name format.
- dataSourceIncrementColumn string - the name of the column to be monitored.
- dataSourceIncrementMode string - TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
- dataSourceMonitorMode string - TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
- dataSourceMonitorResource string - When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
- dataSourceStartFrom string - HEAD means copy stock + incremental data, TAIL means copy only incremental data.
- dataTargetInsertMode string - INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- dataTargetPrimaryKeyField string - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- dataTargetRecordMappings CkafkaDatahubTaskSourceResourceMySqlParamDataTargetRecordMapping[] - Mapping relationship between tables and messages.
- ddlTopic string - The Topic that stores the DDL information of MySQL; if empty, it will not be stored by default.
- dropCls CkafkaDatahubTaskSourceResourceMySqlParamDropCls - When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is invalid.
- dropInvalidMessage boolean - Whether to discard messages that fail to parse; the default is true.
- includeContentChanges string - If the value is all, both DDL and DML data will be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- includeQuery boolean - If the value is true and the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
- isTablePrefix boolean - When the Table input is a prefix, the value of this item is true, otherwise it is false.
- isTableRegular boolean - Whether the input table is a regular expression; if this option and IsTablePrefix are both true, this option takes priority over IsTablePrefix.
- keyColumns string - Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables not specified default to the table's primary key.
- outputFormat string - output format, DEFAULT, CANAL_1, CANAL_2.
- recordWithSchema boolean - If the value is true, the message will carry the schema corresponding to the message structure; if false, it will not.
- signalDatabase string - database name of signal table.
- snapshotMode string - whether to copy inventory information (schema_only does not copy, initial copies the full amount); the default is initial.
- topicRegex string - Regular expression for routing events to specific topics, defaults to (.*).
- topicReplacement string - TopicRegex replacement, $1, $2.
- database str - MySQL database name, * is the whole database.
- resource str - MySQL connection Id.
- table str - The name of the MySQL data table; * means all non-system tables in the monitored databases. Use , (comma) to monitor multiple tables; each table must be written in the format database name.table name, and a regular expression uses the same database name.table name format.
- data_source_increment_column str - the name of the column to be monitored.
- data_source_increment_mode str - TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
- data_source_monitor_mode str - TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
- data_source_monitor_resource str - When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
- data_source_start_from str - HEAD means copy stock + incremental data, TAIL means copy only incremental data.
- data_target_insert_mode str - INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- data_target_primary_key_field str - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- data_target_record_mappings Sequence[CkafkaDatahubTaskSourceResourceMySqlParamDataTargetRecordMapping] - Mapping relationship between tables and messages.
- ddl_topic str - The Topic that stores the DDL information of MySQL; if empty, it will not be stored by default.
- drop_cls CkafkaDatahubTaskSourceResourceMySqlParamDropCls - When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is invalid.
- drop_invalid_message bool - Whether to discard messages that fail to parse; the default is true.
- include_content_changes str - If the value is all, both DDL and DML data will be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- include_query bool - If the value is true and the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
- is_table_prefix bool - When the Table input is a prefix, the value of this item is true, otherwise it is false.
- is_table_regular bool - Whether the input table is a regular expression; if this option and IsTablePrefix are both true, this option takes priority over IsTablePrefix.
- key_columns str - Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables not specified default to the table's primary key.
- output_format str - output format, DEFAULT, CANAL_1, CANAL_2.
- record_with_schema bool - If the value is true, the message will carry the schema corresponding to the message structure; if false, it will not.
- signal_database str - database name of signal table.
- snapshot_mode str - whether to copy inventory information (schema_only does not copy, initial copies the full amount); the default is initial.
- topic_regex str - Regular expression for routing events to specific topics, defaults to (.*).
- topic_replacement str - TopicRegex replacement, $1, $2.
- database String - MySQL database name, * is the whole database.
- resource String - MySQL connection Id.
- table String - The name of the MySQL data table; * means all non-system tables in the monitored databases. Use , (comma) to monitor multiple tables; each table must be written in the format database name.table name, and a regular expression uses the same database name.table name format.
- dataSourceIncrementColumn String - the name of the column to be monitored.
- dataSourceIncrementMode String - TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
- dataSourceMonitorMode String - TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
- dataSourceMonitorResource String - When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
- dataSourceStartFrom String - HEAD means copy stock + incremental data, TAIL means copy only incremental data.
- dataTargetInsertMode String - INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- dataTargetPrimaryKeyField String - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- dataTargetRecordMappings List<Property Map> - Mapping relationship between tables and messages.
- ddlTopic String - The Topic that stores the DDL information of MySQL; if empty, it will not be stored by default.
- dropCls Property Map - When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is invalid.
- dropInvalidMessage Boolean - Whether to discard messages that fail to parse; the default is true.
- includeContentChanges String - If the value is all, both DDL and DML data will be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- includeQuery Boolean - If the value is true and the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
- isTablePrefix Boolean - When the Table input is a prefix, the value of this item is true, otherwise it is false.
- isTableRegular Boolean - Whether the input table is a regular expression; if this option and IsTablePrefix are both true, this option takes priority over IsTablePrefix.
- keyColumns String - Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables not specified default to the table's primary key.
- outputFormat String - output format, DEFAULT, CANAL_1, CANAL_2.
- recordWithSchema Boolean - If the value is true, the message will carry the schema corresponding to the message structure; if false, it will not.
- signalDatabase String - database name of signal table.
- snapshotMode String - whether to copy inventory information (schema_only does not copy, initial copies the full amount); the default is initial.
- topicRegex String - Regular expression for routing events to specific topics, defaults to (.*).
- topicReplacement String - TopicRegex replacement, $1, $2.
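To tie the many MySqlParam knobs together, here is a hedged TypeScript sketch of an increment-column configuration; connection id, database, table, and column names are placeholders.

// Sketch only: a MySqlParam reading a table via an auto-increment column.
const mySqlParam = {
    resource: "resource-xxxxxxxx",        // hypothetical MySQL connection id
    database: "appdb",
    table: "appdb.orders",                // database name.table name format
    dataSourceMonitorMode: "TABLE",       // TABLE reads a table, QUERY reads a query
    dataSourceMonitorResource: "orders",  // the table to read when the mode is TABLE
    dataSourceIncrementMode: "INCREMENT", // TIMESTAMP | INCREMENT
    dataSourceIncrementColumn: "id",      // hypothetical monitored column
    dataSourceStartFrom: "HEAD",          // HEAD: stock + incremental; TAIL: incremental only
    dataTargetInsertMode: "INSERT",       // INSERT | UPSERT
    snapshotMode: "initial",              // schema_only | initial
};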
CkafkaDatahubTaskSourceResourceMySqlParamDataTargetRecordMapping, CkafkaDatahubTaskSourceResourceMySqlParamDataTargetRecordMappingArgs
- AllowNull bool - Whether the message is allowed to be empty.
- AutoIncrement bool - Whether it is an auto-increment column.
- ColumnName string - Column name.
- ColumnSize string - Current column size.
- DecimalDigits string - Current column decimal digits.
- DefaultValue string - Database table default parameters.
- ExtraInfo string - Database table extra fields.
- JsonKey string - The key name of the message.
- Type string - Message type.
- AllowNull bool - Whether the message is allowed to be empty.
- AutoIncrement bool - Whether it is an auto-increment column.
- ColumnName string - Column name.
- ColumnSize string - Current column size.
- DecimalDigits string - Current column decimal digits.
- DefaultValue string - Database table default parameters.
- ExtraInfo string - Database table extra fields.
- JsonKey string - The key name of the message.
- Type string - Message type.
- allowNull Boolean - Whether the message is allowed to be empty.
- autoIncrement Boolean - Whether it is an auto-increment column.
- columnName String - Column name.
- columnSize String - Current column size.
- decimalDigits String - Current column decimal digits.
- defaultValue String - Database table default parameters.
- extraInfo String - Database table extra fields.
- jsonKey String - The key name of the message.
- type String - Message type.
- allowNull boolean - Whether the message is allowed to be empty.
- autoIncrement boolean - Whether it is an auto-increment column.
- columnName string - Column name.
- columnSize string - Current column size.
- decimalDigits string - Current column decimal digits.
- defaultValue string - Database table default parameters.
- extraInfo string - Database table extra fields.
- jsonKey string - The key name of the message.
- type string - Message type.
- allow_null bool - Whether the message is allowed to be empty.
- auto_increment bool - Whether it is an auto-increment column.
- column_name str - Column name.
- column_size str - Current column size.
- decimal_digits str - Current column decimal digits.
- default_value str - Database table default parameters.
- extra_info str - Database table extra fields.
- json_key str - The key name of the message.
- type str - Message type.
- allowNull Boolean - Whether the message is allowed to be empty.
- autoIncrement Boolean - Whether it is an auto-increment column.
- columnName String - Column name.
- columnSize String - Current column size.
- decimalDigits String - Current column decimal digits.
- defaultValue String - Database table default parameters.
- extraInfo String - Database table extra fields.
- jsonKey String - The key name of the message.
- type String - Message type.
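As a sketch, one entry of a dataTargetRecordMappings list in TypeScript; the column name, JSON key, and type values are hypothetical.

// Hypothetical mapping of one database column to a JSON key in the message.
const recordMapping = {
    columnName: "order_id",     // database column
    jsonKey: "orderId",         // key carried in the message
    type: "int",                // message type
    allowNull: false,
    autoIncrement: true,
    columnSize: "11",
    decimalDigits: "0",
    defaultValue: "",
    extraInfo: "",
};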
CkafkaDatahubTaskSourceResourceMySqlParamDropCls, CkafkaDatahubTaskSourceResourceMySqlParamDropClsArgs
- DropClsLogSet string - CLS logset ID.
- DropClsOwneruin string - Account.
- DropClsRegion string - The region where the CLS is delivered.
- DropClsTopicId string - CLS topic.
- DropInvalidMessageToCls bool - Whether to deliver to CLS.
- DropClsLogSet string - CLS logset ID.
- DropClsOwneruin string - Account.
- DropClsRegion string - The region where the CLS is delivered.
- DropClsTopicId string - CLS topic.
- DropInvalidMessageToCls bool - Whether to deliver to CLS.
- dropClsLogSet String - CLS logset ID.
- dropClsOwneruin String - Account.
- dropClsRegion String - The region where the CLS is delivered.
- dropClsTopicId String - CLS topic.
- dropInvalidMessageToCls Boolean - Whether to deliver to CLS.
- dropClsLogSet string - CLS logset ID.
- dropClsOwneruin string - Account.
- dropClsRegion string - The region where the CLS is delivered.
- dropClsTopicId string - CLS topic.
- dropInvalidMessageToCls boolean - Whether to deliver to CLS.
- drop_cls_log_set str - CLS logset ID.
- drop_cls_owneruin str - Account.
- drop_cls_region str - The region where the CLS is delivered.
- drop_cls_topic_id str - CLS topic.
- drop_invalid_message_to_cls bool - Whether to deliver to CLS.
- dropClsLogSet String - CLS logset ID.
- dropClsOwneruin String - Account.
- dropClsRegion String - The region where the CLS is delivered.
- dropClsTopicId String - CLS topic.
- dropInvalidMessageToCls Boolean - Whether to deliver to CLS.
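A minimal sketch of a dropCls block in TypeScript, delivering unparseable messages to a CLS topic instead of discarding them silently; every ID below is a hypothetical placeholder.

// Deliver parse failures to CLS for inspection.
const dropCls = {
    dropInvalidMessageToCls: true,
    dropClsOwneruin: "100000000001",     // account uin (placeholder)
    dropClsRegion: "ap-guangzhou",
    dropClsLogSet: "logset-xxxxxxxx",    // CLS logset id (placeholder)
    dropClsTopicId: "topic-xxxxxxxx",    // CLS topic id (placeholder)
};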
CkafkaDatahubTaskSourceResourcePostgreSqlParam, CkafkaDatahubTaskSourceResourcePostgreSqlParamArgs
- Database string - PostgreSQL database name.
- PluginName string - (decoderbufs/pgoutput), default decoderbufs.
- Resource string - PostgreSQL connection ID.
- Table string - PostgreSQL table name. `*` matches all non-system tables in the monitored databases; to monitor multiple tables, separate them with commas, each in the format schemaName.tableName. When a regular expression is used, it must follow the same schemaName.tableName format.
- DataFormat string - Upstream data format (JSON|Debezium), required when the database synchronization mode matches the default field.
- DataTargetInsertMode string - INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
- DataTargetPrimaryKeyField string - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- DataTargetRecordMappings List<CkafkaDatahubTaskSourceResourcePostgreSqlParamDataTargetRecordMapping> - Mapping relationship between tables and messages.
- DropInvalidMessage bool - Whether to discard messages that fail to parse; the default is true.
- IsTableRegular bool - Whether the input table is a regular expression.
- KeyColumns string - Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the primary key of the table.
- RecordWithSchema bool - If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
- SnapshotMode string - never|initial, default initial.
- Database string - PostgreSQL database name.
- PluginName string - (decoderbufs/pgoutput), default decoderbufs.
- Resource string - PostgreSQL connection ID.
- Table string - PostgreSQL table name. `*` matches all non-system tables in the monitored databases; to monitor multiple tables, separate them with commas, each in the format schemaName.tableName. When a regular expression is used, it must follow the same schemaName.tableName format.
- DataFormat string - Upstream data format (JSON|Debezium), required when the database synchronization mode matches the default field.
- DataTargetInsertMode string - INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
- DataTargetPrimaryKeyField string - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- DataTargetRecordMappings []CkafkaDatahubTaskSourceResourcePostgreSqlParamDataTargetRecordMapping - Mapping relationship between tables and messages.
- DropInvalidMessage bool - Whether to discard messages that fail to parse; the default is true.
- IsTableRegular bool - Whether the input table is a regular expression.
- KeyColumns string - Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the primary key of the table.
- RecordWithSchema bool - If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
- SnapshotMode string - never|initial, default initial.
- database String - PostgreSQL database name.
- pluginName String - (decoderbufs/pgoutput), default decoderbufs.
- resource String - PostgreSQL connection ID.
- table String - PostgreSQL table name. `*` matches all non-system tables in the monitored databases; to monitor multiple tables, separate them with commas, each in the format schemaName.tableName. When a regular expression is used, it must follow the same schemaName.tableName format.
- dataFormat String - Upstream data format (JSON|Debezium), required when the database synchronization mode matches the default field.
- dataTargetInsertMode String - INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
- dataTargetPrimaryKeyField String - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- dataTargetRecordMappings List<CkafkaDatahubTaskSourceResourcePostgreSqlParamDataTargetRecordMapping> - Mapping relationship between tables and messages.
- dropInvalidMessage Boolean - Whether to discard messages that fail to parse; the default is true.
- isTableRegular Boolean - Whether the input table is a regular expression.
- keyColumns String - Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the primary key of the table.
- recordWithSchema Boolean - If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
- snapshotMode String - never|initial, default initial.
- database string - PostgreSQL database name.
- pluginName string - (decoderbufs/pgoutput), default decoderbufs.
- resource string - PostgreSQL connection ID.
- table string - PostgreSQL table name. `*` matches all non-system tables in the monitored databases; to monitor multiple tables, separate them with commas, each in the format schemaName.tableName. When a regular expression is used, it must follow the same schemaName.tableName format.
- dataFormat string - Upstream data format (JSON|Debezium), required when the database synchronization mode matches the default field.
- dataTargetInsertMode string - INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
- dataTargetPrimaryKeyField string - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- dataTargetRecordMappings CkafkaDatahubTaskSourceResourcePostgreSqlParamDataTargetRecordMapping[] - Mapping relationship between tables and messages.
- dropInvalidMessage boolean - Whether to discard messages that fail to parse; the default is true.
- isTableRegular boolean - Whether the input table is a regular expression.
- keyColumns string - Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the primary key of the table.
- recordWithSchema boolean - If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
- snapshotMode string - never|initial, default initial.
- database str - PostgreSQL database name.
- plugin_name str - (decoderbufs/pgoutput), default decoderbufs.
- resource str - PostgreSQL connection ID.
- table str - PostgreSQL table name. `*` matches all non-system tables in the monitored databases; to monitor multiple tables, separate them with commas, each in the format schemaName.tableName. When a regular expression is used, it must follow the same schemaName.tableName format.
- data_format str - Upstream data format (JSON|Debezium), required when the database synchronization mode matches the default field.
- data_target_insert_mode str - INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
- data_target_primary_key_field str - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- data_target_record_mappings Sequence[CkafkaDatahubTaskSourceResourcePostgreSqlParamDataTargetRecordMapping] - Mapping relationship between tables and messages.
- drop_invalid_message bool - Whether to discard messages that fail to parse; the default is true.
- is_table_regular bool - Whether the input table is a regular expression.
- key_columns str - Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the primary key of the table.
- record_with_schema bool - If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
- snapshot_mode str - never|initial, default initial.
- database String - PostgreSQL database name.
- pluginName String - (decoderbufs/pgoutput), default decoderbufs.
- resource String - PostgreSQL connection ID.
- table String - PostgreSQL table name. `*` matches all non-system tables in the monitored databases; to monitor multiple tables, separate them with commas, each in the format schemaName.tableName. When a regular expression is used, it must follow the same schemaName.tableName format.
- dataFormat String - Upstream data format (JSON|Debezium), required when the database synchronization mode matches the default field.
- dataTargetInsertMode String - INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
- dataTargetPrimaryKeyField String - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- dataTargetRecordMappings List<Property Map> - Mapping relationship between tables and messages.
- dropInvalidMessage Boolean - Whether to discard messages that fail to parse; the default is true.
- isTableRegular Boolean - Whether the input table is a regular expression.
- keyColumns String - Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the primary key of the table.
- recordWithSchema Boolean - If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
- snapshotMode String - never|initial, default initial.
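To make the KeyColumns format concrete, a one-line TypeScript sketch following the documented library.table:field pattern; the schema, table, and field names are hypothetical.

// (id, region) keys schema1.table1; uid keys schema2.table2; other tables fall back to their primary key.
const keyColumns = "schema1.table1:id,region;schema2.table2:uid";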
CkafkaDatahubTaskSourceResourcePostgreSqlParamDataTargetRecordMapping, CkafkaDatahubTaskSourceResourcePostgreSqlParamDataTargetRecordMappingArgs
- AllowNull bool - Whether the message is allowed to be empty.
- AutoIncrement bool - Whether it is an auto-increment column.
- ColumnName string - Column name.
- ColumnSize string - Current column size.
- DecimalDigits string - Current column decimal digits.
- DefaultValue string - Database table default parameters.
- ExtraInfo string - Database table extra fields.
- JsonKey string - The key name of the message.
- Type string - Message type.
- AllowNull bool - Whether the message is allowed to be empty.
- AutoIncrement bool - Whether it is an auto-increment column.
- ColumnName string - Column name.
- ColumnSize string - Current column size.
- DecimalDigits string - Current column decimal digits.
- DefaultValue string - Database table default parameters.
- ExtraInfo string - Database table extra fields.
- JsonKey string - The key name of the message.
- Type string - Message type.
- allowNull Boolean - Whether the message is allowed to be empty.
- autoIncrement Boolean - Whether it is an auto-increment column.
- columnName String - Column name.
- columnSize String - Current column size.
- decimalDigits String - Current column decimal digits.
- defaultValue String - Database table default parameters.
- extraInfo String - Database table extra fields.
- jsonKey String - The key name of the message.
- type String - Message type.
- allowNull boolean - Whether the message is allowed to be empty.
- autoIncrement boolean - Whether it is an auto-increment column.
- columnName string - Column name.
- columnSize string - Current column size.
- decimalDigits string - Current column decimal digits.
- defaultValue string - Database table default parameters.
- extraInfo string - Database table extra fields.
- jsonKey string - The key name of the message.
- type string - Message type.
- allow_null bool - Whether the message is allowed to be empty.
- auto_increment bool - Whether it is an auto-increment column.
- column_name str - Column name.
- column_size str - Current column size.
- decimal_digits str - Current column decimal digits.
- default_value str - Database table default parameters.
- extra_info str - Database table extra fields.
- json_key str - The key name of the message.
- type str - Message type.
- allowNull Boolean - Whether the message is allowed to be empty.
- autoIncrement Boolean - Whether it is an auto-increment column.
- columnName String - Column name.
- columnSize String - Current column size.
- decimalDigits String - Current column decimal digits.
- defaultValue String - Database table default parameters.
- extraInfo String - Database table extra fields.
- jsonKey String - The key name of the message.
- type String - Message type.
CkafkaDatahubTaskSourceResourceScfParam, CkafkaDatahubTaskSourceResourceScfParamArgs
- FunctionName string - SCF function name.
- BatchSize double - The maximum number of messages sent in each batch; the default is 1000.
- MaxRetries double - The number of retries after the SCF call fails; the default is 5.
- Namespace string - SCF cloud function namespace; the default is default.
- Qualifier string - SCF cloud function version and alias; the default is DEFAULT.
- FunctionName string - SCF function name.
- BatchSize float64 - The maximum number of messages sent in each batch; the default is 1000.
- MaxRetries float64 - The number of retries after the SCF call fails; the default is 5.
- Namespace string - SCF cloud function namespace; the default is default.
- Qualifier string - SCF cloud function version and alias; the default is DEFAULT.
- functionName String - SCF function name.
- batchSize Double - The maximum number of messages sent in each batch; the default is 1000.
- maxRetries Double - The number of retries after the SCF call fails; the default is 5.
- namespace String - SCF cloud function namespace; the default is default.
- qualifier String - SCF cloud function version and alias; the default is DEFAULT.
- functionName string - SCF function name.
- batchSize number - The maximum number of messages sent in each batch; the default is 1000.
- maxRetries number - The number of retries after the SCF call fails; the default is 5.
- namespace string - SCF cloud function namespace; the default is default.
- qualifier string - SCF cloud function version and alias; the default is DEFAULT.
- function_name str - SCF function name.
- batch_size float - The maximum number of messages sent in each batch; the default is 1000.
- max_retries float - The number of retries after the SCF call fails; the default is 5.
- namespace str - SCF cloud function namespace; the default is default.
- qualifier str - SCF cloud function version and alias; the default is DEFAULT.
- functionName String - SCF function name.
- batchSize Number - The maximum number of messages sent in each batch; the default is 1000.
- maxRetries Number - The number of retries after the SCF call fails; the default is 5.
- namespace String - SCF cloud function namespace; the default is default.
- qualifier String - SCF cloud function version and alias; the default is DEFAULT.
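A minimal TypeScript sketch of an scfParam block; the function name is a hypothetical placeholder, and the remaining values simply restate the documented defaults.

// Hypothetical SCF delivery settings.
const scfParam = {
    functionName: "my-consumer-fn",   // hypothetical SCF function
    namespace: "default",
    qualifier: "DEFAULT",
    batchSize: 1000,                  // max messages per batch
    maxRetries: 5,                    // retries after a failed call
};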
CkafkaDatahubTaskSourceResourceSqlServerParam, CkafkaDatahubTaskSourceResourceSqlServerParamArgs
- Database string - SQLServer database name.
- Resource string - SQLServer connection ID.
- Table string - SQLServer table. `*` matches all non-system tables in the monitored databases; to monitor multiple tables, separate them with commas, each in the format databaseName.tableName.
- SnapshotMode string - schema_only|initial, default initial.
- Database string - SQLServer database name.
- Resource string - SQLServer connection ID.
- Table string - SQLServer table. `*` matches all non-system tables in the monitored databases; to monitor multiple tables, separate them with commas, each in the format databaseName.tableName.
- SnapshotMode string - schema_only|initial, default initial.
- database String - SQLServer database name.
- resource String - SQLServer connection ID.
- table String - SQLServer table. `*` matches all non-system tables in the monitored databases; to monitor multiple tables, separate them with commas, each in the format databaseName.tableName.
- snapshotMode String - schema_only|initial, default initial.
- database string - SQLServer database name.
- resource string - SQLServer connection ID.
- table string - SQLServer table. `*` matches all non-system tables in the monitored databases; to monitor multiple tables, separate them with commas, each in the format databaseName.tableName.
- snapshotMode string - schema_only|initial, default initial.
- database str - SQLServer database name.
- resource str - SQLServer connection ID.
- table str - SQLServer table. `*` matches all non-system tables in the monitored databases; to monitor multiple tables, separate them with commas, each in the format databaseName.tableName.
- snapshot_mode str - schema_only|initial, default initial.
- database String - SQLServer database name.
- resource String - SQLServer connection ID.
- table String - SQLServer table. `*` matches all non-system tables in the monitored databases; to monitor multiple tables, separate them with commas, each in the format databaseName.tableName.
- snapshotMode String - schema_only|initial, default initial.
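A minimal TypeScript sketch of a sqlServerParam block; the database, table, and connection ID are hypothetical placeholders.

// Hypothetical SQLServer source settings.
const sqlServerParam = {
    database: "appdb",                    // hypothetical database
    table: "appdb.orders,appdb.users",    // databaseName.tableName, comma separated
    resource: "resource-xxxxxxxx",        // hypothetical SQLServer connection id
    snapshotMode: "initial",              // schema_only | initial
};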
CkafkaDatahubTaskSourceResourceTdwParam, CkafkaDatahubTaskSourceResourceTdwParamArgs
- Bid string - TDW bid.
- Tid string - TDW tid.
- IsDomestic bool - Default true.
- TdwHost string - TDW address; default tl-tdbank-tdmanager.tencent-distribute.com.
- TdwPort double - TDW port; default 8099.
- Bid string - TDW bid.
- Tid string - TDW tid.
- IsDomestic bool - Default true.
- TdwHost string - TDW address; default tl-tdbank-tdmanager.tencent-distribute.com.
- TdwPort float64 - TDW port; default 8099.
- bid String - TDW bid.
- tid String - TDW tid.
- isDomestic Boolean - Default true.
- tdwHost String - TDW address; default tl-tdbank-tdmanager.tencent-distribute.com.
- tdwPort Double - TDW port; default 8099.
- bid string - TDW bid.
- tid string - TDW tid.
- isDomestic boolean - Default true.
- tdwHost string - TDW address; default tl-tdbank-tdmanager.tencent-distribute.com.
- tdwPort number - TDW port; default 8099.
- bid str - TDW bid.
- tid str - TDW tid.
- is_domestic bool - Default true.
- tdw_host str - TDW address; default tl-tdbank-tdmanager.tencent-distribute.com.
- tdw_port float - TDW port; default 8099.
- bid String - TDW bid.
- tid String - TDW tid.
- isDomestic Boolean - Default true.
- tdwHost String - TDW address; default tl-tdbank-tdmanager.tencent-distribute.com.
- tdwPort Number - TDW port; default 8099.
CkafkaDatahubTaskSourceResourceTopicParam, CkafkaDatahubTaskSourceResourceTopicParamArgs
- Resource string - The topic name of the topic sold separately.
- CompressionType string - Whether to perform compression when writing to the topic; fill in none to disable, or choose one of gzip, snappy, or lz4 to enable.
- MsgMultiple double - One source topic message is amplified into MsgMultiple copies and written to the target topic (currently only applicable when CKafka flows into CKafka).
- OffsetType string - Offset type: earliest for the initial position, latest for the latest position, timestamp for a point-in-time position.
- StartTime double - Required when OffsetType is timestamp; pass a timestamp accurate to the second.
- TopicId string - Topic ID.
- UseAutoCreateTopic bool - Whether the topic in use should be created automatically (currently only supported for SOURCE inflow tasks).
- Resource string - The topic name of the topic sold separately.
- CompressionType string - Whether to perform compression when writing to the topic; fill in none to disable, or choose one of gzip, snappy, or lz4 to enable.
- MsgMultiple float64 - One source topic message is amplified into MsgMultiple copies and written to the target topic (currently only applicable when CKafka flows into CKafka).
- OffsetType string - Offset type: earliest for the initial position, latest for the latest position, timestamp for a point-in-time position.
- StartTime float64 - Required when OffsetType is timestamp; pass a timestamp accurate to the second.
- TopicId string - Topic ID.
- UseAutoCreateTopic bool - Whether the topic in use should be created automatically (currently only supported for SOURCE inflow tasks).
- resource String - The topic name of the topic sold separately.
- compressionType String - Whether to perform compression when writing to the topic; fill in none to disable, or choose one of gzip, snappy, or lz4 to enable.
- msgMultiple Double - One source topic message is amplified into MsgMultiple copies and written to the target topic (currently only applicable when CKafka flows into CKafka).
- offsetType String - Offset type: earliest for the initial position, latest for the latest position, timestamp for a point-in-time position.
- startTime Double - Required when OffsetType is timestamp; pass a timestamp accurate to the second.
- topicId String - Topic ID.
- useAutoCreateTopic Boolean - Whether the topic in use should be created automatically (currently only supported for SOURCE inflow tasks).
- resource string - The topic name of the topic sold separately.
- compressionType string - Whether to perform compression when writing to the topic; fill in none to disable, or choose one of gzip, snappy, or lz4 to enable.
- msgMultiple number - One source topic message is amplified into MsgMultiple copies and written to the target topic (currently only applicable when CKafka flows into CKafka).
- offsetType string - Offset type: earliest for the initial position, latest for the latest position, timestamp for a point-in-time position.
- startTime number - Required when OffsetType is timestamp; pass a timestamp accurate to the second.
- topicId string - Topic ID.
- useAutoCreateTopic boolean - Whether the topic in use should be created automatically (currently only supported for SOURCE inflow tasks).
- resource str - The topic name of the topic sold separately.
- compression_type str - Whether to perform compression when writing to the topic; fill in none to disable, or choose one of gzip, snappy, or lz4 to enable.
- msg_multiple float - One source topic message is amplified into MsgMultiple copies and written to the target topic (currently only applicable when CKafka flows into CKafka).
- offset_type str - Offset type: earliest for the initial position, latest for the latest position, timestamp for a point-in-time position.
- start_time float - Required when OffsetType is timestamp; pass a timestamp accurate to the second.
- topic_id str - Topic ID.
- use_auto_create_topic bool - Whether the topic in use should be created automatically (currently only supported for SOURCE inflow tasks).
- resource String - The topic name of the topic sold separately.
- compressionType String - Whether to perform compression when writing to the topic; fill in none to disable, or choose one of gzip, snappy, or lz4 to enable.
- msgMultiple Number - One source topic message is amplified into MsgMultiple copies and written to the target topic (currently only applicable when CKafka flows into CKafka).
- offsetType String - Offset type: earliest for the initial position, latest for the latest position, timestamp for a point-in-time position.
- startTime Number - Required when OffsetType is timestamp; pass a timestamp accurate to the second.
- topicId String - Topic ID.
- useAutoCreateTopic Boolean - Whether the topic in use should be created automatically (currently only supported for SOURCE inflow tasks).
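A minimal TypeScript sketch of a topicParam block that starts consuming from a point in time; the topic name and timestamp are hypothetical placeholders.

// Start from a second-level timestamp rather than earliest/latest.
const topicParam = {
    resource: "example-topic",     // hypothetical topic name
    offsetType: "timestamp",       // earliest | latest | timestamp
    startTime: 1700000000,         // required for "timestamp", epoch seconds (placeholder)
    compressionType: "none",       // none | gzip | snappy | lz4
};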
CkafkaDatahubTaskTargetResource, CkafkaDatahubTaskTargetResourceArgs
- Type string - Resource type.
- ClickHouseParam CkafkaDatahubTaskTargetResourceClickHouseParam - ClickHouse configuration, required when Type is CLICKHOUSE.
- ClsParam CkafkaDatahubTaskTargetResourceClsParam - CLS configuration, required when Type is CLS.
- CosParam CkafkaDatahubTaskTargetResourceCosParam - COS configuration, required when Type is COS.
- CtsdbParam CkafkaDatahubTaskTargetResourceCtsdbParam - Ctsdb configuration, required when Type is CTSDB.
- DtsParam CkafkaDatahubTaskTargetResourceDtsParam - DTS configuration, required when Type is DTS.
- EsParam CkafkaDatahubTaskTargetResourceEsParam - ES configuration, required when Type is ES.
- EventBusParam CkafkaDatahubTaskTargetResourceEventBusParam - EB configuration, required when Type is EB.
- KafkaParam CkafkaDatahubTaskTargetResourceKafkaParam - CKafka configuration, required when Type is KAFKA.
- MariaDbParam CkafkaDatahubTaskTargetResourceMariaDbParam - MariaDB configuration, required when Type is MARIADB.
- MongoDbParam CkafkaDatahubTaskTargetResourceMongoDbParam - MongoDB configuration, required when Type is MONGODB.
- MySqlParam CkafkaDatahubTaskTargetResourceMySqlParam - MySQL configuration, required when Type is MYSQL.
- PostgreSqlParam CkafkaDatahubTaskTargetResourcePostgreSqlParam - PostgreSQL configuration, required when Type is POSTGRESQL or TDSQL_C_POSTGRESQL.
- ScfParam CkafkaDatahubTaskTargetResourceScfParam - SCF configuration, required when Type is SCF.
- SqlServerParam CkafkaDatahubTaskTargetResourceSqlServerParam - SQLServer configuration, required when Type is SQLSERVER.
- TdwParam CkafkaDatahubTaskTargetResourceTdwParam - TDW configuration, required when Type is TDW.
- TopicParam CkafkaDatahubTaskTargetResourceTopicParam - Topic configuration, required when Type is TOPIC.
- Type string - Resource type.
- ClickHouseParam CkafkaDatahubTaskTargetResourceClickHouseParam - ClickHouse configuration, required when Type is CLICKHOUSE.
- ClsParam CkafkaDatahubTaskTargetResourceClsParam - CLS configuration, required when Type is CLS.
- CosParam CkafkaDatahubTaskTargetResourceCosParam - COS configuration, required when Type is COS.
- CtsdbParam CkafkaDatahubTaskTargetResourceCtsdbParam - Ctsdb configuration, required when Type is CTSDB.
- DtsParam CkafkaDatahubTaskTargetResourceDtsParam - DTS configuration, required when Type is DTS.
- EsParam CkafkaDatahubTaskTargetResourceEsParam - ES configuration, required when Type is ES.
- EventBusParam CkafkaDatahubTaskTargetResourceEventBusParam - EB configuration, required when Type is EB.
- KafkaParam CkafkaDatahubTaskTargetResourceKafkaParam - CKafka configuration, required when Type is KAFKA.
- MariaDbParam CkafkaDatahubTaskTargetResourceMariaDbParam - MariaDB configuration, required when Type is MARIADB.
- MongoDbParam CkafkaDatahubTaskTargetResourceMongoDbParam - MongoDB configuration, required when Type is MONGODB.
- MySqlParam CkafkaDatahubTaskTargetResourceMySqlParam - MySQL configuration, required when Type is MYSQL.
- PostgreSqlParam CkafkaDatahubTaskTargetResourcePostgreSqlParam - PostgreSQL configuration, required when Type is POSTGRESQL or TDSQL_C_POSTGRESQL.
- ScfParam CkafkaDatahubTaskTargetResourceScfParam - SCF configuration, required when Type is SCF.
- SqlServerParam CkafkaDatahubTaskTargetResourceSqlServerParam - SQLServer configuration, required when Type is SQLSERVER.
- TdwParam CkafkaDatahubTaskTargetResourceTdwParam - TDW configuration, required when Type is TDW.
- TopicParam CkafkaDatahubTaskTargetResourceTopicParam - Topic configuration, required when Type is TOPIC.
- type String - Resource type.
- clickHouseParam CkafkaDatahubTaskTargetResourceClickHouseParam - ClickHouse configuration, required when Type is CLICKHOUSE.
- clsParam CkafkaDatahubTaskTargetResourceClsParam - CLS configuration, required when Type is CLS.
- cosParam CkafkaDatahubTaskTargetResourceCosParam - COS configuration, required when Type is COS.
- ctsdbParam CkafkaDatahubTaskTargetResourceCtsdbParam - Ctsdb configuration, required when Type is CTSDB.
- dtsParam CkafkaDatahubTaskTargetResourceDtsParam - DTS configuration, required when Type is DTS.
- esParam CkafkaDatahubTaskTargetResourceEsParam - ES configuration, required when Type is ES.
- eventBusParam CkafkaDatahubTaskTargetResourceEventBusParam - EB configuration, required when Type is EB.
- kafkaParam CkafkaDatahubTaskTargetResourceKafkaParam - CKafka configuration, required when Type is KAFKA.
- mariaDbParam CkafkaDatahubTaskTargetResourceMariaDbParam - MariaDB configuration, required when Type is MARIADB.
- mongoDbParam CkafkaDatahubTaskTargetResourceMongoDbParam - MongoDB configuration, required when Type is MONGODB.
- mySqlParam CkafkaDatahubTaskTargetResourceMySqlParam - MySQL configuration, required when Type is MYSQL.
- postgreSqlParam CkafkaDatahubTaskTargetResourcePostgreSqlParam - PostgreSQL configuration, required when Type is POSTGRESQL or TDSQL_C_POSTGRESQL.
- scfParam CkafkaDatahubTaskTargetResourceScfParam - SCF configuration, required when Type is SCF.
- sqlServerParam CkafkaDatahubTaskTargetResourceSqlServerParam - SQLServer configuration, required when Type is SQLSERVER.
- tdwParam CkafkaDatahubTaskTargetResourceTdwParam - TDW configuration, required when Type is TDW.
- topicParam CkafkaDatahubTaskTargetResourceTopicParam - Topic configuration, required when Type is TOPIC.
- type string - Resource type.
- clickHouseParam CkafkaDatahubTaskTargetResourceClickHouseParam - ClickHouse configuration, required when Type is CLICKHOUSE.
- clsParam CkafkaDatahubTaskTargetResourceClsParam - CLS configuration, required when Type is CLS.
- cosParam CkafkaDatahubTaskTargetResourceCosParam - COS configuration, required when Type is COS.
- ctsdbParam CkafkaDatahubTaskTargetResourceCtsdbParam - Ctsdb configuration, required when Type is CTSDB.
- dtsParam CkafkaDatahubTaskTargetResourceDtsParam - DTS configuration, required when Type is DTS.
- esParam CkafkaDatahubTaskTargetResourceEsParam - ES configuration, required when Type is ES.
- eventBusParam CkafkaDatahubTaskTargetResourceEventBusParam - EB configuration, required when Type is EB.
- kafkaParam CkafkaDatahubTaskTargetResourceKafkaParam - CKafka configuration, required when Type is KAFKA.
- mariaDbParam CkafkaDatahubTaskTargetResourceMariaDbParam - MariaDB configuration, required when Type is MARIADB.
- mongoDbParam CkafkaDatahubTaskTargetResourceMongoDbParam - MongoDB configuration, required when Type is MONGODB.
- mySqlParam CkafkaDatahubTaskTargetResourceMySqlParam - MySQL configuration, required when Type is MYSQL.
- postgreSqlParam CkafkaDatahubTaskTargetResourcePostgreSqlParam - PostgreSQL configuration, required when Type is POSTGRESQL or TDSQL_C_POSTGRESQL.
- scfParam CkafkaDatahubTaskTargetResourceScfParam - SCF configuration, required when Type is SCF.
- sqlServerParam CkafkaDatahubTaskTargetResourceSqlServerParam - SQLServer configuration, required when Type is SQLSERVER.
- tdwParam CkafkaDatahubTaskTargetResourceTdwParam - TDW configuration, required when Type is TDW.
- topicParam CkafkaDatahubTaskTargetResourceTopicParam - Topic configuration, required when Type is TOPIC.
- type str - Resource type.
- click_house_param CkafkaDatahubTaskTargetResourceClickHouseParam - ClickHouse configuration, required when Type is CLICKHOUSE.
- cls_param CkafkaDatahubTaskTargetResourceClsParam - CLS configuration, required when Type is CLS.
- cos_param CkafkaDatahubTaskTargetResourceCosParam - COS configuration, required when Type is COS.
- ctsdb_param CkafkaDatahubTaskTargetResourceCtsdbParam - Ctsdb configuration, required when Type is CTSDB.
- dts_param CkafkaDatahubTaskTargetResourceDtsParam - DTS configuration, required when Type is DTS.
- es_param CkafkaDatahubTaskTargetResourceEsParam - ES configuration, required when Type is ES.
- event_bus_param CkafkaDatahubTaskTargetResourceEventBusParam - EB configuration, required when Type is EB.
- kafka_param CkafkaDatahubTaskTargetResourceKafkaParam - CKafka configuration, required when Type is KAFKA.
- maria_db_param CkafkaDatahubTaskTargetResourceMariaDbParam - MariaDB configuration, required when Type is MARIADB.
- mongo_db_param CkafkaDatahubTaskTargetResourceMongoDbParam - MongoDB configuration, required when Type is MONGODB.
- my_sql_param CkafkaDatahubTaskTargetResourceMySqlParam - MySQL configuration, required when Type is MYSQL.
- postgre_sql_param CkafkaDatahubTaskTargetResourcePostgreSqlParam - PostgreSQL configuration, required when Type is POSTGRESQL or TDSQL_C_POSTGRESQL.
- scf_param CkafkaDatahubTaskTargetResourceScfParam - SCF configuration, required when Type is SCF.
- sql_server_param CkafkaDatahubTaskTargetResourceSqlServerParam - SQLServer configuration, required when Type is SQLSERVER.
- tdw_param CkafkaDatahubTaskTargetResourceTdwParam - TDW configuration, required when Type is TDW.
- topic_param CkafkaDatahubTaskTargetResourceTopicParam - Topic configuration, required when Type is TOPIC.
- type String - Resource type.
- clickHouseParam Property Map - ClickHouse configuration, required when Type is CLICKHOUSE.
- clsParam Property Map - CLS configuration, required when Type is CLS.
- cosParam Property Map - COS configuration, required when Type is COS.
- ctsdbParam Property Map - Ctsdb configuration, required when Type is CTSDB.
- dtsParam Property Map - DTS configuration, required when Type is DTS.
- esParam Property Map - ES configuration, required when Type is ES.
- eventBusParam Property Map - EB configuration, required when Type is EB.
- kafkaParam Property Map - CKafka configuration, required when Type is KAFKA.
- mariaDbParam Property Map - MariaDB configuration, required when Type is MARIADB.
- mongoDbParam Property Map - MongoDB configuration, required when Type is MONGODB.
- mySqlParam Property Map - MySQL configuration, required when Type is MYSQL.
- postgreSqlParam Property Map - PostgreSQL configuration, required when Type is POSTGRESQL or TDSQL_C_POSTGRESQL.
- scfParam Property Map - SCF configuration, required when Type is SCF.
- sqlServerParam Property Map - SQLServer configuration, required when Type is SQLSERVER.
- tdwParam Property Map - TDW configuration, required when Type is TDW.
- topicParam Property Map - Topic configuration, required when Type is TOPIC.
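The param block must match type. As a minimal TypeScript sketch, a CLS target pairs type "CLS" with clsParam; the CLS topic ID below is a hypothetical placeholder.

// Target resource: type and its matching param block.
const targetResource = {
    type: "CLS",
    clsParam: {
        resource: "topic-xxxxxxxx",   // hypothetical CLS topic id
        decodeJson: true,
    },
};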
CkafkaDatahubTaskTargetResourceClickHouseParam, CkafkaDatahubTaskTargetResourceClickHouseParamArgs
- Cluster string - ClickHouse cluster.
- Database string - ClickHouse database name.
- Resource string - Resource ID.
- Schemas List<CkafkaDatahubTaskTargetResourceClickHouseParamSchema> - ClickHouse schema.
- Table string - ClickHouse table.
- DropCls CkafkaDatahubTaskTargetResourceClickHouseParamDropCls - When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is invalid.
- DropInvalidMessage bool - Whether ClickHouse discards messages that fail to parse; the default is true.
- Ip string - ClickHouse IP.
- Password string - ClickHouse password.
- Port double - ClickHouse port.
- SelfBuilt bool - Whether it is a self-built cluster.
- ServiceVip string - Instance VIP.
- Type string - Type of table column.
- UniqVpcId string - Instance VPC ID.
- UserName string - ClickHouse user name.
- Cluster string - ClickHouse cluster.
- Database string - ClickHouse database name.
- Resource string - Resource ID.
- Schemas []CkafkaDatahubTaskTargetResourceClickHouseParamSchema - ClickHouse schema.
- Table string - ClickHouse table.
- DropCls CkafkaDatahubTaskTargetResourceClickHouseParamDropCls - When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is invalid.
- DropInvalidMessage bool - Whether ClickHouse discards messages that fail to parse; the default is true.
- Ip string - ClickHouse IP.
- Password string - ClickHouse password.
- Port float64 - ClickHouse port.
- SelfBuilt bool - Whether it is a self-built cluster.
- ServiceVip string - Instance VIP.
- Type string - Type of table column.
- UniqVpcId string - Instance VPC ID.
- UserName string - ClickHouse user name.
- cluster String - ClickHouse cluster.
- database String - ClickHouse database name.
- resource String - Resource ID.
- schemas List<CkafkaDatahubTaskTargetResourceClickHouseParamSchema> - ClickHouse schema.
- table String - ClickHouse table.
- dropCls CkafkaDatahubTaskTargetResourceClickHouseParamDropCls - When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is invalid.
- dropInvalidMessage Boolean - Whether ClickHouse discards messages that fail to parse; the default is true.
- ip String - ClickHouse IP.
- password String - ClickHouse password.
- port Double - ClickHouse port.
- selfBuilt Boolean - Whether it is a self-built cluster.
- serviceVip String - Instance VIP.
- type String - Type of table column.
- uniqVpcId String - Instance VPC ID.
- userName String - ClickHouse user name.
- cluster string - ClickHouse cluster.
- database string - ClickHouse database name.
- resource string - Resource ID.
- schemas CkafkaDatahubTaskTargetResourceClickHouseParamSchema[] - ClickHouse schema.
- table string - ClickHouse table.
- dropCls CkafkaDatahubTaskTargetResourceClickHouseParamDropCls - When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is invalid.
- dropInvalidMessage boolean - Whether ClickHouse discards messages that fail to parse; the default is true.
- ip string - ClickHouse IP.
- password string - ClickHouse password.
- port number - ClickHouse port.
- selfBuilt boolean - Whether it is a self-built cluster.
- serviceVip string - Instance VIP.
- type string - Type of table column.
- uniqVpcId string - Instance VPC ID.
- userName string - ClickHouse user name.
- cluster str - ClickHouse cluster.
- database str - ClickHouse database name.
- resource str - Resource ID.
- schemas Sequence[CkafkaDatahubTaskTargetResourceClickHouseParamSchema] - ClickHouse schema.
- table str - ClickHouse table.
- drop_cls CkafkaDatahubTaskTargetResourceClickHouseParamDropCls - When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is invalid.
- drop_invalid_message bool - Whether ClickHouse discards messages that fail to parse; the default is true.
- ip str - ClickHouse IP.
- password str - ClickHouse password.
- port float - ClickHouse port.
- self_built bool - Whether it is a self-built cluster.
- service_vip str - Instance VIP.
- type str - Type of table column.
- uniq_vpc_id str - Instance VPC ID.
- user_name str - ClickHouse user name.
- cluster String - ClickHouse cluster.
- database String - ClickHouse database name.
- resource String - Resource ID.
- schemas List<Property Map> - ClickHouse schema.
- table String - ClickHouse table.
- dropCls Property Map - When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is invalid.
- dropInvalidMessage Boolean - Whether ClickHouse discards messages that fail to parse; the default is true.
- ip String - ClickHouse IP.
- password String - ClickHouse password.
- port Number - ClickHouse port.
- selfBuilt Boolean - Whether it is a self-built cluster.
- serviceVip String - Instance VIP.
- type String - Type of table column.
- uniqVpcId String - Instance VPC ID.
- userName String - ClickHouse user name.
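A minimal TypeScript sketch of a clickHouseParam block with an explicit column schema; the cluster, database, table, resource ID, and ClickHouse column types are all hypothetical placeholders.

// Hypothetical ClickHouse target with two mapped columns.
const clickHouseParam = {
    cluster: "default_cluster",
    database: "analytics",
    table: "events",
    resource: "resource-xxxxxxxx",   // hypothetical resource id
    selfBuilt: false,
    dropInvalidMessage: true,        // discard rows that fail to parse
    schemas: [
        { columnName: "event_id", jsonKey: "eventId", type: "UInt64", allowNull: false },
        { columnName: "payload", jsonKey: "payload", type: "String", allowNull: true },
    ],
};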
CkafkaDatahubTaskTargetResourceClickHouseParamDropCls, CkafkaDatahubTaskTargetResourceClickHouseParamDropClsArgs
- DropClsLogSet string - CLS logset ID.
- DropClsOwneruin string - Account.
- DropClsRegion string - The region where the CLS is delivered.
- DropClsTopicId string - CLS topic.
- DropInvalidMessageToCls bool - Whether to deliver to CLS.
- DropClsLogSet string - CLS logset ID.
- DropClsOwneruin string - Account.
- DropClsRegion string - The region where the CLS is delivered.
- DropClsTopicId string - CLS topic.
- DropInvalidMessageToCls bool - Whether to deliver to CLS.
- dropClsLogSet String - CLS logset ID.
- dropClsOwneruin String - Account.
- dropClsRegion String - The region where the CLS is delivered.
- dropClsTopicId String - CLS topic.
- dropInvalidMessageToCls Boolean - Whether to deliver to CLS.
- dropClsLogSet string - CLS logset ID.
- dropClsOwneruin string - Account.
- dropClsRegion string - The region where the CLS is delivered.
- dropClsTopicId string - CLS topic.
- dropInvalidMessageToCls boolean - Whether to deliver to CLS.
- drop_cls_log_set str - CLS logset ID.
- drop_cls_owneruin str - Account.
- drop_cls_region str - The region where the CLS is delivered.
- drop_cls_topic_id str - CLS topic.
- drop_invalid_message_to_cls bool - Whether to deliver to CLS.
- dropClsLogSet String - CLS logset ID.
- dropClsOwneruin String - Account.
- dropClsRegion String - The region where the CLS is delivered.
- dropClsTopicId String - CLS topic.
- dropInvalidMessageToCls Boolean - Whether to deliver to CLS.
CkafkaDatahubTaskTargetResourceClickHouseParamSchema, CkafkaDatahubTaskTargetResourceClickHouseParamSchemaArgs
- AllowNull bool - Whether the column item is allowed to be empty.
- ColumnName string - Column name.
- JsonKey string - The JSON key name corresponding to this column.
- Type string - Type of table column.
- AllowNull bool - Whether the column item is allowed to be empty.
- ColumnName string - Column name.
- JsonKey string - The JSON key name corresponding to this column.
- Type string - Type of table column.
- allowNull Boolean - Whether the column item is allowed to be empty.
- columnName String - Column name.
- jsonKey String - The JSON key name corresponding to this column.
- type String - Type of table column.
- allowNull boolean - Whether the column item is allowed to be empty.
- columnName string - Column name.
- jsonKey string - The JSON key name corresponding to this column.
- type string - Type of table column.
- allow_null bool - Whether the column item is allowed to be empty.
- column_name str - Column name.
- json_key str - The JSON key name corresponding to this column.
- type str - Type of table column.
- allowNull Boolean - Whether the column item is allowed to be empty.
- columnName String - Column name.
- jsonKey String - The JSON key name corresponding to this column.
- type String - Type of table column.
CkafkaDatahubTaskTargetResourceClsParam, CkafkaDatahubTaskTargetResourceClsParamArgs
- DecodeJson bool - Whether the produced information is in JSON format.
- Resource string - CLS ID.
- ContentKey string - Required when DecodeJson is false.
- LogSet string - Logset ID.
- TimeField string - Specify the content of a field in the message as the time of the CLS log; the field content must be a second-level timestamp.
- DecodeJson bool - Whether the produced information is in JSON format.
- Resource string - CLS ID.
- ContentKey string - Required when DecodeJson is false.
- LogSet string - Logset ID.
- TimeField string - Specify the content of a field in the message as the time of the CLS log; the field content must be a second-level timestamp.
- decodeJson Boolean - Whether the produced information is in JSON format.
- resource String - CLS ID.
- contentKey String - Required when DecodeJson is false.
- logSet String - Logset ID.
- timeField String - Specify the content of a field in the message as the time of the CLS log; the field content must be a second-level timestamp.
- decodeJson boolean - Whether the produced information is in JSON format.
- resource string - CLS ID.
- contentKey string - Required when DecodeJson is false.
- logSet string - Logset ID.
- timeField string - Specify the content of a field in the message as the time of the CLS log; the field content must be a second-level timestamp.
- decode_json bool - Whether the produced information is in JSON format.
- resource str - CLS ID.
- content_key str - Required when DecodeJson is false.
- log_set str - Logset ID.
- time_field str - Specify the content of a field in the message as the time of the CLS log; the field content must be a second-level timestamp.
- decodeJson Boolean - Whether the produced information is in JSON format.
- resource String - CLS ID.
- contentKey String - Required when DecodeJson is false.
- logSet String - Logset ID.
- timeField String - Specify the content of a field in the message as the time of the CLS log; the field content must be a second-level timestamp.
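A minimal TypeScript sketch of a clsParam block that writes JSON messages to CLS and takes the log time from a message field; the CLS and logset IDs are hypothetical placeholders.

// Hypothetical CLS delivery settings.
const clsParam = {
    resource: "topic-xxxxxxxx",   // hypothetical CLS topic id
    logSet: "logset-xxxxxxxx",    // hypothetical logset id
    decodeJson: true,             // messages are already JSON
    timeField: "ts",              // field holding a second-level timestamp
};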
CkafkaDatahubTaskTargetResourceCosParam, CkafkaDatahubTaskTargetResourceCosParamArgs
- BucketName string - COS bucket name.
- Region string - Region code.
- AggregateBatchSize double - The size of aggregated messages, in MB.
- AggregateInterval double - Time interval.
- DirectoryTimeFormat string - Partition format, formatted according to strptime.
- FormatOutputType string - The file format after message aggregation: csv|json.
- ObjectKey string - Object key.
- ObjectKeyPrefix string - Dumped object directory prefix.
- BucketName string - COS bucket name.
- Region string - Region code.
- AggregateBatchSize float64 - The size of aggregated messages, in MB.
- AggregateInterval float64 - Time interval.
- DirectoryTimeFormat string - Partition format, formatted according to strptime.
- FormatOutputType string - The file format after message aggregation: csv|json.
- ObjectKey string - Object key.
- ObjectKeyPrefix string - Dumped object directory prefix.
- bucketName String - COS bucket name.
- region String - Region code.
- aggregateBatchSize Double - The size of aggregated messages, in MB.
- aggregateInterval Double - Time interval.
- directoryTimeFormat String - Partition format, formatted according to strptime.
- formatOutputType String - The file format after message aggregation: csv|json.
- objectKey String - Object key.
- objectKeyPrefix String - Dumped object directory prefix.
- bucketName string - COS bucket name.
- region string - Region code.
- aggregateBatchSize number - The size of aggregated messages, in MB.
- aggregateInterval number - Time interval.
- directoryTimeFormat string - Partition format, formatted according to strptime.
- formatOutputType string - The file format after message aggregation: csv|json.
- objectKey string - Object key.
- objectKeyPrefix string - Dumped object directory prefix.
- bucket_name str - COS bucket name.
- region str - Region code.
- aggregate_batch_size float - The size of aggregated messages, in MB.
- aggregate_interval float - Time interval.
- directory_time_format str - Partition format, formatted according to strptime.
- format_output_type str - The file format after message aggregation: csv|json.
- object_key str - Object key.
- object_key_prefix str - Dumped object directory prefix.
- bucketName String - COS bucket name.
- region String - Region code.
- aggregateBatchSize Number - The size of aggregated messages, in MB.
- aggregateInterval Number - Time interval.
- directoryTimeFormat String - Partition format, formatted according to strptime.
- formatOutputType String - The file format after message aggregation: csv|json.
- objectKey String - Object key.
- objectKeyPrefix String - Dumped object directory prefix.
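A minimal TypeScript sketch of a cosParam block that aggregates messages into JSON objects under a time-partitioned prefix; the bucket name, prefix, and numeric values are hypothetical placeholders (the unit of aggregateInterval is not stated here).

// Hypothetical COS dump settings.
const cosParam = {
    bucketName: "my-bucket-1250000000",   // hypothetical bucket
    region: "ap-guangzhou",
    aggregateBatchSize: 100,              // MB per aggregated object
    aggregateInterval: 60,                // aggregation time interval (placeholder)
    formatOutputType: "json",             // csv | json
    directoryTimeFormat: "%Y-%m-%d",      // strptime-style partition format
    objectKeyPrefix: "ckafka/dump/",      // hypothetical prefix
};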
CkafkaDatahubTaskTargetResourceCtsdbParam, CkafkaDatahubTaskTargetResourceCtsdbParamArgs
- CtsdbMetric string - Ctsdb metric.
- Resource string - Resource ID.
- CtsdbMetric string - Ctsdb metric.
- Resource string - Resource ID.
- ctsdbMetric String - Ctsdb metric.
- resource String - Resource ID.
- ctsdbMetric string - Ctsdb metric.
- resource string - Resource ID.
- ctsdb_metric str - Ctsdb metric.
- resource str - Resource ID.
- ctsdbMetric String - Ctsdb metric.
- resource String - Resource ID.
CkafkaDatahubTaskTargetResourceDtsParam, CkafkaDatahubTaskTargetResourceDtsParamArgs
- Resource string - DTS instance ID.
- GroupId string - DTS consumer group ID.
- GroupPassword string - DTS consumer group password.
- GroupUser string - DTS account.
- Ip string - DTS connection IP.
- Port double - DTS connection port.
- Topic string - DTS topic.
- TranSql bool - False to synchronize the original data; true to synchronize the parsed JSON-format data. The default is true.
- Resource string - DTS instance ID.
- GroupId string - DTS consumer group ID.
- GroupPassword string - DTS consumer group password.
- GroupUser string - DTS account.
- Ip string - DTS connection IP.
- Port float64 - DTS connection port.
- Topic string - DTS topic.
- TranSql bool - False to synchronize the original data; true to synchronize the parsed JSON-format data. The default is true.
- resource String - DTS instance ID.
- groupId String - DTS consumer group ID.
- groupPassword String - DTS consumer group password.
- groupUser String - DTS account.
- ip String - DTS connection IP.
- port Double - DTS connection port.
- topic String - DTS topic.
- tranSql Boolean - False to synchronize the original data; true to synchronize the parsed JSON-format data. The default is true.
- resource string - DTS instance ID.
- groupId string - DTS consumer group ID.
- groupPassword string - DTS consumer group password.
- groupUser string - DTS account.
- ip string - DTS connection IP.
- port number - DTS connection port.
- topic string - DTS topic.
- tranSql boolean - False to synchronize the original data; true to synchronize the parsed JSON-format data. The default is true.
- resource str - DTS instance ID.
- group_id str - DTS consumer group ID.
- group_password str - DTS consumer group password.
- group_user str - DTS account.
- ip str - DTS connection IP.
- port float - DTS connection port.
- topic str - DTS topic.
- tran_sql bool - False to synchronize the original data; true to synchronize the parsed JSON-format data. The default is true.
- resource String - DTS instance ID.
- groupId String - DTS consumer group ID.
- groupPassword String - DTS consumer group password.
- groupUser String - DTS account.
- ip String - DTS connection IP.
- port Number - DTS connection port.
- topic String - DTS topic.
- tranSql Boolean - False to synchronize the original data; true to synchronize the parsed JSON-format data. The default is true.
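A minimal TypeScript sketch of a dtsParam block; the instance ID, consumer group, credentials, and address are all hypothetical placeholders.

// Hypothetical DTS settings.
const dtsParam = {
    resource: "dts-xxxxxxxx",        // hypothetical DTS instance id
    topic: "dts-topic",              // hypothetical DTS topic
    groupId: "consumer-group-1",     // hypothetical consumer group
    groupUser: "account",            // placeholder account
    groupPassword: "********",       // placeholder password
    ip: "10.0.0.10",                 // placeholder connection ip
    port: 7093,                      // placeholder connection port
    tranSql: true,                   // deliver parsed JSON rather than raw data
};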
CkafkaDatahubTaskTargetResourceEsParam, CkafkaDatahubTaskTargetResourceEsParamArgs
- Resource string
- Resource.
- ContentKey string
- Key for data in non-JSON format.
- DatabasePrimaryKey string
- When the message dumped to ES is a database binlog and insert, update, and delete operations need to be synchronized to ES, fill in the primary key of the database table.
- DateFormat string
- Es date suffix.
- DocumentIdField string
- The field name of the document ID value dumped into Es.
- DropCls CkafkaDatahubTaskTargetResourceEsParamDropCls
- When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is invalid.
- DropDlq CkafkaDatahubTaskTargetResourceEsParamDropDlq
- Dead letter queue.
- DropInvalidJsonMessage bool
- Whether Es discards messages in non-JSON format.
- DropInvalidMessage bool
- Whether Es discards messages that fail to parse.
- Index string
- Es index name.
- IndexType string
- Es custom index name type, STRING or JSONPATH; the default is STRING.
- Password string
- Es Password.
- Port double
- Es connection port.
- SelfBuilt bool
- Whether it is a self-built cluster.
- ServiceVip string
- Instance vip.
- UniqVpcId string
- Instance vpc id.
- UserName string
- Es UserName.
- Resource string
- Resource.
- ContentKey string
- Key for data in non-JSON format.
- DatabasePrimaryKey string
- When the message dumped to ES is a database binlog and insert, update, and delete operations need to be synchronized to ES, fill in the primary key of the database table.
- DateFormat string
- Es date suffix.
- DocumentIdField string
- The field name of the document ID value dumped into Es.
- DropCls CkafkaDatahubTaskTargetResourceEsParamDropCls
- When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is invalid.
- DropDlq CkafkaDatahubTaskTargetResourceEsParamDropDlq
- Dead letter queue.
- DropInvalidJsonMessage bool
- Whether Es discards messages in non-JSON format.
- DropInvalidMessage bool
- Whether Es discards messages that fail to parse.
- Index string
- Es index name.
- IndexType string
- Es custom index name type, STRING or JSONPATH; the default is STRING.
- Password string
- Es Password.
- Port float64
- Es connection port.
- SelfBuilt bool
- Whether it is a self-built cluster.
- ServiceVip string
- Instance vip.
- UniqVpcId string
- Instance vpc id.
- UserName string
- Es UserName.
- resource String
- Resource.
- contentKey String
- Key for data in non-JSON format.
- databasePrimaryKey String
- When the message dumped to ES is a database binlog and insert, update, and delete operations need to be synchronized to ES, fill in the primary key of the database table.
- dateFormat String
- Es date suffix.
- documentIdField String
- The field name of the document ID value dumped into Es.
- dropCls CkafkaDatahubTaskTargetResourceEsParamDropCls
- When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is invalid.
- dropDlq CkafkaDatahubTaskTargetResourceEsParamDropDlq
- Dead letter queue.
- dropInvalidJsonMessage Boolean
- Whether Es discards messages in non-JSON format.
- dropInvalidMessage Boolean
- Whether Es discards messages that fail to parse.
- index String
- Es index name.
- indexType String
- Es custom index name type, STRING or JSONPATH; the default is STRING.
- password String
- Es Password.
- port Double
- Es connection port.
- selfBuilt Boolean
- Whether it is a self-built cluster.
- serviceVip String
- Instance vip.
- uniqVpcId String
- Instance vpc id.
- userName String
- Es UserName.
- resource string
- Resource.
- contentKey string
- Key for data in non-JSON format.
- databasePrimaryKey string
- When the message dumped to ES is a database binlog and insert, update, and delete operations need to be synchronized to ES, fill in the primary key of the database table.
- dateFormat string
- Es date suffix.
- documentIdField string
- The field name of the document ID value dumped into Es.
- dropCls CkafkaDatahubTaskTargetResourceEsParamDropCls
- When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is invalid.
- dropDlq CkafkaDatahubTaskTargetResourceEsParamDropDlq
- Dead letter queue.
- dropInvalidJsonMessage boolean
- Whether Es discards messages in non-JSON format.
- dropInvalidMessage boolean
- Whether Es discards messages that fail to parse.
- index string
- Es index name.
- indexType string
- Es custom index name type, STRING or JSONPATH; the default is STRING.
- password string
- Es Password.
- port number
- Es connection port.
- selfBuilt boolean
- Whether it is a self-built cluster.
- serviceVip string
- Instance vip.
- uniqVpcId string
- Instance vpc id.
- userName string
- Es UserName.
- resource str
- Resource.
- content_key str
- Key for data in non-JSON format.
- database_primary_key str
- When the message dumped to ES is a database binlog and insert, update, and delete operations need to be synchronized to ES, fill in the primary key of the database table.
- date_format str
- Es date suffix.
- document_id_field str
- The field name of the document ID value dumped into Es.
- drop_cls CkafkaDatahubTaskTargetResourceEsParamDropCls
- When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is invalid.
- drop_dlq CkafkaDatahubTaskTargetResourceEsParamDropDlq
- Dead letter queue.
- drop_invalid_json_message bool
- Whether Es discards messages in non-JSON format.
- drop_invalid_message bool
- Whether Es discards messages that fail to parse.
- index str
- Es index name.
- index_type str
- Es custom index name type, STRING or JSONPATH; the default is STRING.
- password str
- Es Password.
- port float
- Es connection port.
- self_built bool
- Whether it is a self-built cluster.
- service_vip str
- Instance vip.
- uniq_vpc_id str
- Instance vpc id.
- user_name str
- Es UserName.
- resource String
- Resource.
- contentKey String
- Key for data in non-JSON format.
- databasePrimaryKey String
- When the message dumped to ES is a database binlog and insert, update, and delete operations need to be synchronized to ES, fill in the primary key of the database table.
- dateFormat String
- Es date suffix.
- documentIdField String
- The field name of the document ID value dumped into Es.
- dropCls Property Map
- When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is invalid.
- dropDlq Property Map
- Dead letter queue.
- dropInvalidJsonMessage Boolean
- Whether Es discards messages in non-JSON format.
- dropInvalidMessage Boolean
- Whether Es discards messages that fail to parse.
- index String
- Es index name.
- indexType String
- Es custom index name type, STRING or JSONPATH; the default is STRING.
- password String
- Es Password.
- port Number
- Es connection port.
- selfBuilt Boolean
- Whether it is a self-built cluster.
- serviceVip String
- Instance vip.
- uniqVpcId String
- Instance vpc id.
- userName String
- Es UserName.
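A minimal sketch of an ES-bound task using the esParam fields above, assuming a SINK-type task and an existing ES connection resource; all IDs, credentials, and the index name are placeholders.
import * as tencentcloud from "@pulumi/tencentcloud";
// Hypothetical ES-bound task; every ID and credential below is a placeholder.
const esTask = new tencentcloud.CkafkaDatahubTask("esTask", {
    taskName: "example-es-task",
    taskType: "SINK",
    sourceResource: {
        type: "TOPIC",
        topicParam: {
            resource: "example-topic",
        },
    },
    targetResource: {
        type: "ES",
        esParam: {
            resource: "es-xxxxxxxx",
            index: "ckafka-index",
            indexType: "STRING",           // STRING or JSONPATH; STRING is the default
            port: 9200,
            userName: "elastic",
            password: "placeholder",
            selfBuilt: false,
            dropInvalidMessage: true,      // discard messages that fail to parse
            dropInvalidJsonMessage: true,  // discard non-JSON messages
        },
    },
});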
CkafkaDatahubTaskTargetResourceEsParamDropCls, CkafkaDatahubTaskTargetResourceEsParamDropClsArgs
- DropClsLogSet string
- Cls LogSet id.
- DropClsOwneruin string
- Account.
- DropClsRegion string
- The region where the cls is delivered.
- DropClsTopicId string
- Cls topic.
- DropInvalidMessageToCls bool
- Whether to deliver to cls.
- DropClsLogSet string
- Cls LogSet id.
- DropClsOwneruin string
- Account.
- DropClsRegion string
- The region where the cls is delivered.
- DropClsTopicId string
- Cls topic.
- DropInvalidMessageToCls bool
- Whether to deliver to cls.
- dropClsLogSet String
- Cls LogSet id.
- dropClsOwneruin String
- Account.
- dropClsRegion String
- The region where the cls is delivered.
- dropClsTopicId String
- Cls topic.
- dropInvalidMessageToCls Boolean
- Whether to deliver to cls.
- dropClsLogSet string
- Cls LogSet id.
- dropClsOwneruin string
- Account.
- dropClsRegion string
- The region where the cls is delivered.
- dropClsTopicId string
- Cls topic.
- dropInvalidMessageToCls boolean
- Whether to deliver to cls.
- drop_cls_log_set str
- Cls LogSet id.
- drop_cls_owneruin str
- Account.
- drop_cls_region str
- The region where the cls is delivered.
- drop_cls_topic_id str
- Cls topic.
- drop_invalid_message_to_cls bool
- Whether to deliver to cls.
- dropClsLogSet String
- Cls LogSet id.
- dropClsOwneruin String
- Account.
- dropClsRegion String
- The region where the cls is delivered.
- dropClsTopicId String
- Cls topic.
- dropInvalidMessageToCls Boolean
- Whether to deliver to cls.
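A hypothetical fragment showing how the DropCls fields above combine inside an esParam: when dropInvalidMessageToCls is true, failed messages are delivered to the named CLS destination and dropInvalidMessage is ignored. All IDs are placeholders.
// Hypothetical fragment; all IDs are placeholders.
const esParamWithCls = {
    resource: "es-xxxxxxxx",
    dropCls: {
        dropInvalidMessageToCls: true,       // deliver failed messages to CLS
        dropClsRegion: "ap-guangzhou",       // region where the cls is delivered
        dropClsOwneruin: "100000000001",     // account uin
        dropClsLogSet: "logset-xxxxxxxx",    // cls LogSet id
        dropClsTopicId: "topic-xxxxxxxx",    // cls topic
    },
};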
CkafkaDatahubTaskTargetResourceEsParamDropDlq, CkafkaDatahubTaskTargetResourceEsParamDropDlqArgs
- Type string
- Type: DLQ dead letter queue, IGNORE_ERROR, or DROP.
- DlqType string
- Dlq type, CKAFKA or TOPIC.
- KafkaParam CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParam
- Ckafka type dlq.
- MaxRetryAttempts double
- Retry times.
- RetryInterval double
- Retry interval.
- TopicParam CkafkaDatahubTaskTargetResourceEsParamDropDlqTopicParam
- DIP Topic type dead letter queue.
- Type string
- Type: DLQ dead letter queue, IGNORE_ERROR, or DROP.
- DlqType string
- Dlq type, CKAFKA or TOPIC.
- KafkaParam CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParam
- Ckafka type dlq.
- MaxRetryAttempts float64
- Retry times.
- RetryInterval float64
- Retry interval.
- TopicParam CkafkaDatahubTaskTargetResourceEsParamDropDlqTopicParam
- DIP Topic type dead letter queue.
- type String
- Type: DLQ dead letter queue, IGNORE_ERROR, or DROP.
- dlqType String
- Dlq type, CKAFKA or TOPIC.
- kafkaParam CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParam
- Ckafka type dlq.
- maxRetryAttempts Double
- Retry times.
- retryInterval Double
- Retry interval.
- topicParam CkafkaDatahubTaskTargetResourceEsParamDropDlqTopicParam
- DIP Topic type dead letter queue.
- type string
- Type: DLQ dead letter queue, IGNORE_ERROR, or DROP.
- dlqType string
- Dlq type, CKAFKA or TOPIC.
- kafkaParam CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParam
- Ckafka type dlq.
- maxRetryAttempts number
- Retry times.
- retryInterval number
- Retry interval.
- topicParam CkafkaDatahubTaskTargetResourceEsParamDropDlqTopicParam
- DIP Topic type dead letter queue.
- type str
- Type: DLQ dead letter queue, IGNORE_ERROR, or DROP.
- dlq_type str
- Dlq type, CKAFKA or TOPIC.
- kafka_param CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParam
- Ckafka type dlq.
- max_retry_attempts float
- Retry times.
- retry_interval float
- Retry interval.
- topic_param CkafkaDatahubTaskTargetResourceEsParamDropDlqTopicParam
- DIP Topic type dead letter queue.
- type String
- Type: DLQ dead letter queue, IGNORE_ERROR, or DROP.
- dlqType String
- Dlq type, CKAFKA or TOPIC.
- kafkaParam Property Map
- Ckafka type dlq.
- maxRetryAttempts Number
- Retry times.
- retryInterval Number
- Retry interval.
- topicParam Property Map
- DIP Topic type dead letter queue.
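A hedged sketch of the DropDlq fields above inside an esParam: a CKAFKA-type dead letter queue with bounded retries. Instance and topic names are placeholders.
// Hypothetical fragment; instance and topic names are placeholders.
const esParamWithDlq = {
    resource: "es-xxxxxxxx",
    dropDlq: {
        type: "DLQ",           // DLQ, IGNORE_ERROR, or DROP
        dlqType: "CKAFKA",     // CKAFKA or TOPIC
        maxRetryAttempts: 3,   // retry times before dead-lettering
        retryInterval: 5000,   // retry interval
        kafkaParam: {
            resource: "ckafka-xxxxxxxx",
            selfBuilt: false,
            topic: "dlq-topic",
        },
    },
};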
CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParam, CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParamArgs
- Resource string
- Instance resource.
- SelfBuilt bool
- Whether the cluster is self-built rather than a cloud product.
- CompressionType string
- Whether to compress when writing to the Topic; fill in none if disabled, open if enabled.
- EnableToleration bool
- Enable dead letter queue.
- MsgMultiple double
- One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- OffsetType string
- Offset type: from beginning earliest, from latest latest, from a specific time timestamp.
- PartitionNum double
- The partition num of the topic.
- QpsLimit double
- Qps (queries per second) limit.
- ResourceName string
- Instance name.
- StartTime double
- Required when the Offset type is timestamp.
- TableMappings List<CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParamTableMapping>
- Maps of table to topic; required when multi topic is selected.
- Topic string
- Topic name.
- TopicId string
- Topic ID.
- UseAutoCreateTopic bool
- Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the name of the topic to be created automatically in the Topic field).
- UseTableMapping bool
- Whether to use multi table.
- ZoneId double
- Zone ID.
- Resource string
- Instance resource.
- SelfBuilt bool
- Whether the cluster is self-built rather than a cloud product.
- CompressionType string
- Whether to compress when writing to the Topic; fill in none if disabled, open if enabled.
- EnableToleration bool
- Enable dead letter queue.
- MsgMultiple float64
- One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- OffsetType string
- Offset type: from beginning earliest, from latest latest, from a specific time timestamp.
- PartitionNum float64
- The partition num of the topic.
- QpsLimit float64
- Qps (queries per second) limit.
- ResourceName string
- Instance name.
- StartTime float64
- Required when the Offset type is timestamp.
- TableMappings []CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParamTableMapping
- Maps of table to topic; required when multi topic is selected.
- Topic string
- Topic name.
- TopicId string
- Topic ID.
- UseAutoCreateTopic bool
- Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the name of the topic to be created automatically in the Topic field).
- UseTableMapping bool
- Whether to use multi table.
- ZoneId float64
- Zone ID.
- resource String
- Instance resource.
- selfBuilt Boolean
- Whether the cluster is self-built rather than a cloud product.
- compressionType String
- Whether to compress when writing to the Topic; fill in none if disabled, open if enabled.
- enableToleration Boolean
- Enable dead letter queue.
- msgMultiple Double
- One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- offsetType String
- Offset type: from beginning earliest, from latest latest, from a specific time timestamp.
- partitionNum Double
- The partition num of the topic.
- qpsLimit Double
- Qps (queries per second) limit.
- resourceName String
- Instance name.
- startTime Double
- Required when the Offset type is timestamp.
- tableMappings List<CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParamTableMapping>
- Maps of table to topic; required when multi topic is selected.
- topic String
- Topic name.
- topicId String
- Topic ID.
- useAutoCreateTopic Boolean
- Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the name of the topic to be created automatically in the Topic field).
- useTableMapping Boolean
- Whether to use multi table.
- zoneId Double
- Zone ID.
- resource string
- Instance resource.
- selfBuilt boolean
- Whether the cluster is self-built rather than a cloud product.
- compressionType string
- Whether to compress when writing to the Topic; fill in none if disabled, open if enabled.
- enableToleration boolean
- Enable dead letter queue.
- msgMultiple number
- One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- offsetType string
- Offset type: from beginning earliest, from latest latest, from a specific time timestamp.
- partitionNum number
- The partition num of the topic.
- qpsLimit number
- Qps (queries per second) limit.
- resourceName string
- Instance name.
- startTime number
- Required when the Offset type is timestamp.
- tableMappings CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParamTableMapping[]
- Maps of table to topic; required when multi topic is selected.
- topic string
- Topic name.
- topicId string
- Topic ID.
- useAutoCreateTopic boolean
- Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the name of the topic to be created automatically in the Topic field).
- useTableMapping boolean
- Whether to use multi table.
- zoneId number
- Zone ID.
- resource str
- Instance resource.
- self_built bool
- Whether the cluster is self-built rather than a cloud product.
- compression_type str
- Whether to compress when writing to the Topic; fill in none if disabled, open if enabled.
- enable_toleration bool
- Enable dead letter queue.
- msg_multiple float
- One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- offset_type str
- Offset type: from beginning earliest, from latest latest, from a specific time timestamp.
- partition_num float
- The partition num of the topic.
- qps_limit float
- Qps (queries per second) limit.
- resource_name str
- Instance name.
- start_time float
- Required when the Offset type is timestamp.
- table_mappings Sequence[CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParamTableMapping]
- Maps of table to topic; required when multi topic is selected.
- topic str
- Topic name.
- topic_id str
- Topic ID.
- use_auto_create_topic bool
- Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the name of the topic to be created automatically in the Topic field).
- use_table_mapping bool
- Whether to use multi table.
- zone_id float
- Zone ID.
- resource String
- Instance resource.
- selfBuilt Boolean
- Whether the cluster is self-built rather than a cloud product.
- compressionType String
- Whether to compress when writing to the Topic; fill in none if disabled, open if enabled.
- enableToleration Boolean
- Enable dead letter queue.
- msgMultiple Number
- One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- offsetType String
- Offset type: from beginning earliest, from latest latest, from a specific time timestamp.
- partitionNum Number
- The partition num of the topic.
- qpsLimit Number
- Qps (queries per second) limit.
- resourceName String
- Instance name.
- startTime Number
- Required when the Offset type is timestamp.
- tableMappings List<Property Map>
- Maps of table to topic; required when multi topic is selected.
- topic String
- Topic name.
- topicId String
- Topic ID.
- useAutoCreateTopic Boolean
- Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the name of the topic to be created automatically in the Topic field).
- useTableMapping Boolean
- Whether to use multi table.
- zoneId Number
- Zone ID.
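A short sketch of the offset-related fields above: startTime is only consulted when offsetType is timestamp. All values are placeholders.
// Hypothetical fragment; startTime applies only when offsetType is "timestamp".
const dlqKafkaParam = {
    resource: "ckafka-xxxxxxxx",  // instance resource (placeholder)
    selfBuilt: false,
    topic: "dlq-topic",
    offsetType: "timestamp",      // earliest | latest | timestamp
    startTime: 1700000000,        // consumed only with offsetType "timestamp"
    partitionNum: 3,
    qpsLimit: 1000,
};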
CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParamTableMapping, CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParamTableMappingArgs
CkafkaDatahubTaskTargetResourceEsParamDropDlqTopicParam, CkafkaDatahubTaskTargetResourceEsParamDropDlqTopicParamArgs
- Resource string
- The topic name of the topic sold separately.
- CompressionType string
- Whether to perform compression when writing a topic; fill in none if disabled, or one of gzip, snappy, and lz4 if enabled.
- MsgMultiple double
- One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- OffsetType string
- Offset type: initial position earliest, latest position latest, time point position timestamp.
- StartTime double
- Required when the Offset type is timestamp; pass a timestamp, accurate to the second.
- TopicId string
- Topic TopicId.
- UseAutoCreateTopic bool
- Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
- Resource string
- The topic name of the topic sold separately.
- CompressionType string
- Whether to perform compression when writing a topic; fill in none if disabled, or one of gzip, snappy, and lz4 if enabled.
- MsgMultiple float64
- One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- OffsetType string
- Offset type: initial position earliest, latest position latest, time point position timestamp.
- StartTime float64
- Required when the Offset type is timestamp; pass a timestamp, accurate to the second.
- TopicId string
- Topic TopicId.
- UseAutoCreateTopic bool
- Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
- resource String
- The topic name of the topic sold separately.
- compressionType String
- Whether to perform compression when writing a topic; fill in none if disabled, or one of gzip, snappy, and lz4 if enabled.
- msgMultiple Double
- One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- offsetType String
- Offset type: initial position earliest, latest position latest, time point position timestamp.
- startTime Double
- Required when the Offset type is timestamp; pass a timestamp, accurate to the second.
- topicId String
- Topic TopicId.
- useAutoCreateTopic Boolean
- Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
- resource string
- The topic name of the topic sold separately.
- compressionType string
- Whether to perform compression when writing a topic; fill in none if disabled, or one of gzip, snappy, and lz4 if enabled.
- msgMultiple number
- One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- offsetType string
- Offset type: initial position earliest, latest position latest, time point position timestamp.
- startTime number
- Required when the Offset type is timestamp; pass a timestamp, accurate to the second.
- topicId string
- Topic TopicId.
- useAutoCreateTopic boolean
- Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
- resource str
- The topic name of the topic sold separately.
- compression_type str
- Whether to perform compression when writing a topic; fill in none if disabled, or one of gzip, snappy, and lz4 if enabled.
- msg_multiple float
- One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- offset_type str
- Offset type: initial position earliest, latest position latest, time point position timestamp.
- start_time float
- Required when the Offset type is timestamp; pass a timestamp, accurate to the second.
- topic_id str
- Topic TopicId.
- use_auto_create_topic bool
- Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
- resource String
- The topic name of the topic sold separately.
- compressionType String
- Whether to perform compression when writing a topic; fill in none if disabled, or one of gzip, snappy, and lz4 if enabled.
- msgMultiple Number
- One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- offsetType String
- Offset type: initial position earliest, latest position latest, time point position timestamp.
- startTime Number
- Required when the Offset type is timestamp; pass a timestamp, accurate to the second.
- topicId String
- Topic TopicId.
- useAutoCreateTopic Boolean
- Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
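A minimal sketch of a TOPIC-type dead letter queue using the fields above; the topic name is a placeholder and the compression codec is one of the documented options.
// Hypothetical fragment; the topic name is a placeholder.
const dlqTopicParam = {
    resource: "dlq-topic-name",
    compressionType: "lz4",       // none, or one of gzip, snappy, lz4
    offsetType: "earliest",       // earliest | latest | timestamp
    useAutoCreateTopic: false,
};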
CkafkaDatahubTaskTargetResourceEventBusParam, CkafkaDatahubTaskTargetResourceEventBusParamArgs
- resource str
- Instance id.
- self_built bool
- Whether it is a self-built cluster.
- type str
- Resource type, EB_COS/EB_ES/EB_CLS.
- function_name str
- SCF function name.
- namespace str
- SCF namespace.
- qualifier str
- SCF version and alias.
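A hedged TypeScript sketch of the eventBusParam fields listed above, assuming an SCF-backed EB_COS flow; the type value chosen, the function name, the namespace, and the resource id are all placeholders.
// Hypothetical fragment; all names and the EB_COS value are placeholders.
const eventBusParam = {
    resource: "eb-xxxxxxxx",
    type: "EB_COS",                  // EB_COS / EB_ES / EB_CLS
    selfBuilt: false,
    functionName: "my-scf-function", // SCF function name
    namespace: "default",            // SCF namespace
    qualifier: "$LATEST",            // SCF version or alias
};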
CkafkaDatahubTaskTargetResourceKafkaParam, CkafkaDatahubTaskTargetResourceKafkaParamArgs
- Resource string
- Instance resource.
- SelfBuilt bool
- Whether the cluster is self-built rather than a cloud product.
- CompressionType string
- Whether to compress when writing to the Topic; fill in none if disabled, open if enabled.
- EnableToleration bool
- Enable dead letter queue.
- MsgMultiple double
- One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- OffsetType string
- Offset type: from beginning earliest, from latest latest, from a specific time timestamp.
- PartitionNum double
- The partition num of the topic.
- QpsLimit double
- Qps (queries per second) limit.
- ResourceName string
- Instance name.
- StartTime double
- Required when the Offset type is timestamp.
- TableMappings List<CkafkaDatahubTaskTargetResourceKafkaParamTableMapping>
- Maps of table to topic; required when multi topic is selected.
- Topic string
- Topic name.
- TopicId string
- Topic ID.
- UseAutoCreateTopic bool
- Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the name of the topic to be created automatically in the Topic field).
- UseTableMapping bool
- Whether to use multi table.
- ZoneId double
- Zone ID.
- Resource string
- Instance resource.
- SelfBuilt bool
- Whether the cluster is self-built rather than a cloud product.
- CompressionType string
- Whether to compress when writing to the Topic; fill in none if disabled, open if enabled.
- EnableToleration bool
- Enable dead letter queue.
- MsgMultiple float64
- One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- OffsetType string
- Offset type: from beginning earliest, from latest latest, from a specific time timestamp.
- PartitionNum float64
- The partition num of the topic.
- QpsLimit float64
- Qps (queries per second) limit.
- ResourceName string
- Instance name.
- StartTime float64
- Required when the Offset type is timestamp.
- TableMappings []CkafkaDatahubTaskTargetResourceKafkaParamTableMapping
- Maps of table to topic; required when multi topic is selected.
- Topic string
- Topic name.
- TopicId string
- Topic ID.
- UseAutoCreateTopic bool
- Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the name of the topic to be created automatically in the Topic field).
- UseTableMapping bool
- Whether to use multi table.
- ZoneId float64
- Zone ID.
- resource String
- Instance resource.
- selfBuilt Boolean
- Whether the cluster is self-built rather than a cloud product.
- compressionType String
- Whether to compress when writing to the Topic; fill in none if disabled, open if enabled.
- enableToleration Boolean
- Enable dead letter queue.
- msgMultiple Double
- One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- offsetType String
- Offset type: from beginning earliest, from latest latest, from a specific time timestamp.
- partitionNum Double
- The partition num of the topic.
- qpsLimit Double
- Qps (queries per second) limit.
- resourceName String
- Instance name.
- startTime Double
- Required when the Offset type is timestamp.
- tableMappings List<CkafkaDatahubTaskTargetResourceKafkaParamTableMapping>
- Maps of table to topic; required when multi topic is selected.
- topic String
- Topic name.
- topicId String
- Topic ID.
- useAutoCreateTopic Boolean
- Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the name of the topic to be created automatically in the Topic field).
- useTableMapping Boolean
- Whether to use multi table.
- zoneId Double
- Zone ID.
- resource string
- Instance resource.
- selfBuilt boolean
- Whether the cluster is self-built rather than a cloud product.
- compressionType string
- Whether to compress when writing to the Topic; fill in none if disabled, open if enabled.
- enableToleration boolean
- Enable dead letter queue.
- msgMultiple number
- One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- offsetType string
- Offset type: from beginning earliest, from latest latest, from a specific time timestamp.
- partitionNum number
- The partition num of the topic.
- qpsLimit number
- Qps (queries per second) limit.
- resourceName string
- Instance name.
- startTime number
- Required when the Offset type is timestamp.
- tableMappings CkafkaDatahubTaskTargetResourceKafkaParamTableMapping[]
- Maps of table to topic; required when multi topic is selected.
- topic string
- Topic name.
- topicId string
- Topic ID.
- useAutoCreateTopic boolean
- Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the name of the topic to be created automatically in the Topic field).
- useTableMapping boolean
- Whether to use multi table.
- zoneId number
- Zone ID.
- resource str
- Instance resource.
- self_built bool
- Whether the cluster is self-built rather than a cloud product.
- compression_type str
- Whether to compress when writing to the Topic; fill in none if disabled, open if enabled.
- enable_toleration bool
- Enable dead letter queue.
- msg_multiple float
- One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- offset_type str
- Offset type: from beginning earliest, from latest latest, from a specific time timestamp.
- partition_num float
- The partition num of the topic.
- qps_limit float
- Qps (queries per second) limit.
- resource_name str
- Instance name.
- start_time float
- Required when the Offset type is timestamp.
- table_mappings Sequence[CkafkaDatahubTaskTargetResourceKafkaParamTableMapping]
- Maps of table to topic; required when multi topic is selected.
- topic str
- Topic name.
- topic_id str
- Topic ID.
- use_auto_create_topic bool
- Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the name of the topic to be created automatically in the Topic field).
- use_table_mapping bool
- Whether to use multi table.
- zone_id float
- Zone ID.
- resource String
- Instance resource.
- selfBuilt Boolean
- Whether the cluster is self-built rather than a cloud product.
- compressionType String
- Whether to compress when writing to the Topic; fill in none if disabled, open if enabled.
- enableToleration Boolean
- Enable dead letter queue.
- msgMultiple Number
- One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to ckafka flowing into ckafka).
- offsetType String
- Offset type: from beginning earliest, from latest latest, from a specific time timestamp.
- partitionNum Number
- The partition num of the topic.
- qpsLimit Number
- Qps (queries per second) limit.
- resourceName String
- Instance name.
- startTime Number
- Required when the Offset type is timestamp.
- tableMappings List<Property Map>
- Maps of table to topic; required when multi topic is selected.
- topic String
- Topic name.
- topicId String
- Topic ID.
- useAutoCreateTopic Boolean
- Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the name of the topic to be created automatically in the Topic field).
- useTableMapping Boolean
- Whether to use multi table.
- zoneId Number
- Zone ID.
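A sketch of multi-topic distribution with the kafkaParam fields above: setting useTableMapping routes writes through tableMappings instead of the single topic field. The mapping entry shape (database, table, topic, topicId) is an assumption, since the TableMapping type's own fields are not listed here; all names are placeholders.
// Hypothetical fragment; the tableMappings entry shape is an assumption.
const kafkaParamMultiTable = {
    resource: "ckafka-xxxxxxxx",
    selfBuilt: false,
    useTableMapping: true,          // route via tableMappings instead of topic
    tableMappings: [{
        database: "db1",            // assumed field
        table: "orders",            // assumed field
        topic: "orders-topic",      // assumed field
        topicId: "topic-xxxxxxxx",  // assumed field
    }],
};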
CkafkaDatahubTaskTargetResourceKafkaParamTableMapping, CkafkaDatahubTaskTargetResourceKafkaParamTableMappingArgs
CkafkaDatahubTaskTargetResourceMariaDbParam, CkafkaDatahubTaskTargetResourceMariaDbParamArgs
- Database string
- MariaDB database name; * for all databases.
- Resource string
- MariaDB connection Id.
- Table string
- MariaDB table name; * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with commas; each table must be written in the format database name.table name.
- IncludeContentChanges string
- If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written to the selected topic.
- IncludeQuery bool
- If the value is true, and the binlog_rows_query_log_events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
- IsTablePrefix bool
- True when the Table input is a prefix, otherwise false.
- KeyColumns string
- Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by semicolons, fields by commas. Tables not specified default to the table's primary key.
- OutputFormat string
- Output format, DEFAULT, CANAL_1, CANAL_2.
- RecordWithSchema bool
- If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
- SnapshotMode string
- schema_only|initial; the default is initial.
- Database string
- MariaDB database name; * for all databases.
- Resource string
- MariaDB connection Id.
- Table string
- MariaDB table name; * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with commas; each table must be written in the format database name.table name.
- IncludeContentChanges string
- If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written to the selected topic.
- IncludeQuery bool
- If the value is true, and the binlog_rows_query_log_events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
- IsTablePrefix bool
- True when the Table input is a prefix, otherwise false.
- KeyColumns string
- Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by semicolons, fields by commas. Tables not specified default to the table's primary key.
- OutputFormat string
- Output format, DEFAULT, CANAL_1, CANAL_2.
- RecordWithSchema bool
- If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
- SnapshotMode string
- schema_only|initial; the default is initial.
- database String
- MariaDB database name; * for all databases.
- resource String
- MariaDB connection Id.
- table String
- MariaDB table name; * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with commas; each table must be written in the format database name.table name.
- includeContentChanges String
- If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written to the selected topic.
- includeQuery Boolean
- If the value is true, and the binlog_rows_query_log_events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
- isTablePrefix Boolean
- True when the Table input is a prefix, otherwise false.
- keyColumns String
- Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by semicolons, fields by commas. Tables not specified default to the table's primary key.
- outputFormat String
- Output format, DEFAULT, CANAL_1, CANAL_2.
- recordWithSchema Boolean
- If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
- snapshotMode String
- schema_only|initial; the default is initial.
- database string
- MariaDB database name; * for all databases.
- resource string
- MariaDB connection Id.
- table string
- MariaDB table name; * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with commas; each table must be written in the format database name.table name.
- includeContentChanges string
- If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written to the selected topic.
- includeQuery boolean
- If the value is true, and the binlog_rows_query_log_events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
- isTablePrefix boolean
- True when the Table input is a prefix, otherwise false.
- keyColumns string
- Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by semicolons, fields by commas. Tables not specified default to the table's primary key.
- outputFormat string
- Output format, DEFAULT, CANAL_1, CANAL_2.
- recordWithSchema boolean
- If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
- snapshotMode string
- schema_only|initial; the default is initial.
- database str
- MariaDB database name; * for all databases.
- resource str
- MariaDB connection Id.
- table str
- MariaDB table name; * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with commas; each table must be written in the format database name.table name.
- include_content_changes str
- If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written to the selected topic.
- include_query bool
- If the value is true, and the binlog_rows_query_log_events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
- is_table_prefix bool
- True when the Table input is a prefix, otherwise false.
- key_columns str
- Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by semicolons, fields by commas. Tables not specified default to the table's primary key.
- output_format str
- Output format, DEFAULT, CANAL_1, CANAL_2.
- record_with_schema bool
- If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
- snapshot_mode str
- schema_only|initial; the default is initial.
- database String
- MariaDB database name; * for all databases.
- resource String
- MariaDB connection Id.
- table String
- MariaDB table name; * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with commas; each table must be written in the format database name.table name.
- includeContentChanges String
- If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written to the selected topic.
- includeQuery Boolean
- If the value is true, and the binlog_rows_query_log_events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
- isTablePrefix Boolean
- True when the Table input is a prefix, otherwise false.
- keyColumns String
- Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by semicolons, fields by commas. Tables not specified default to the table's primary key.
- outputFormat String
- Output format, DEFAULT, CANAL_1, CANAL_2.
- recordWithSchema Boolean
- If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
- snapshotMode String
- schema_only|initial; the default is initial.
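A hedged sketch of a mariaDbParam using the comma-separated database name.table name convention described above; all names and IDs are placeholders.
// Hypothetical fragment; all names and IDs are placeholders.
const mariaDbParam = {
    resource: "cdb-xxxxxxxx",         // MariaDB connection Id
    database: "shop",
    table: "shop.orders,shop.users",  // database name.table name, comma separated
    includeContentChanges: "dml",     // all | dml
    outputFormat: "DEFAULT",          // DEFAULT, CANAL_1, CANAL_2
    snapshotMode: "initial",          // schema_only | initial
    recordWithSchema: false,
};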
CkafkaDatahubTaskTargetResourceMongoDbParam, CkafkaDatahubTaskTargetResourceMongoDbParamArgs
- Collection string
- MongoDB collection.
- CopyExisting bool
- Whether to copy the stock data; the default is true.
- Database string
- MongoDB database name.
- Resource string
- Resource id.
- Ip string
- MongoDB connection ip.
- ListeningEvent string
- Listening event type; if empty, all are selected. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, and rename; separate multiple types with commas.
- Password string
- MongoDB database password.
- Pipeline string
- Aggregation pipeline.
- Port double
- MongoDB connection port.
- ReadPreference string
- Master-slave priority; the default is the master node.
- SelfBuilt bool
- Whether it is a self-built cluster.
- UserName string
- MongoDB database user name.
- Collection string
- MongoDB collection.
- CopyExisting bool
- Whether to copy the stock data; the default is true.
- Database string
- MongoDB database name.
- Resource string
- Resource id.
- Ip string
- MongoDB connection ip.
- ListeningEvent string
- Listening event type; if empty, all are selected. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, and rename; separate multiple types with commas.
- Password string
- MongoDB database password.
- Pipeline string
- Aggregation pipeline.
- Port float64
- MongoDB connection port.
- ReadPreference string
- Master-slave priority; the default is the master node.
- SelfBuilt bool
- Whether it is a self-built cluster.
- UserName string
- MongoDB database user name.
- collection String
- MongoDB collection.
- copyExisting Boolean
- Whether to copy the stock data; the default is true.
- database String
- MongoDB database name.
- resource String
- Resource id.
- ip String
- MongoDB connection ip.
- listeningEvent String
- Listening event type; if empty, all are selected. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, and rename; separate multiple types with commas.
- password String
- MongoDB database password.
- pipeline String
- Aggregation pipeline.
- port Double
- MongoDB connection port.
- readPreference String
- Master-slave priority; the default is the master node.
- selfBuilt Boolean
- Whether it is a self-built cluster.
- userName String
- MongoDB database user name.
- collection string
- MongoDB collection.
- copyExisting boolean
- Whether to copy the stock data; the default is true.
- database string
- MongoDB database name.
- resource string
- Resource id.
- ip string
- MongoDB connection ip.
- listeningEvent string
- Listening event type; if empty, all are selected. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, and rename; separate multiple types with commas.
- password string
- MongoDB database password.
- pipeline string
- Aggregation pipeline.
- port number
- MongoDB connection port.
- readPreference string
- Master-slave priority; the default is the master node.
- selfBuilt boolean
- Whether it is a self-built cluster.
- userName string
- MongoDB database user name.
- collection str
- MongoDB collection.
- copy_existing bool
- Whether to copy the stock data; the default is true.
- database str
- MongoDB database name.
- resource str
- Resource id.
- ip str
- MongoDB connection ip.
- listening_event str
- Listening event type; if empty, all are selected. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, and rename; separate multiple types with commas.
- password str
- MongoDB database password.
- pipeline str
- Aggregation pipeline.
- port float
- MongoDB connection port.
- read_preference str
- Master-slave priority; the default is the master node.
- self_built bool
- Whether it is a self-built cluster.
- user_name str
- MongoDB database user name.
- collection String
- MongoDB collection.
- copyExisting Boolean
- Whether to copy the stock data; the default is true.
- database String
- MongoDB database name.
- resource String
- Resource id.
- ip String
- MongoDB connection ip.
- listeningEvent String
- Listening event type; if empty, all are selected. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, and rename; separate multiple types with commas.
- password String
- MongoDB database password.
- pipeline String
- Aggregation pipeline.
- port Number
- MongoDB connection port.
- readPreference String
- Master-slave priority; the default is the master node.
- selfBuilt Boolean
- Whether it is a self-built cluster.
- userName String
- MongoDB database user name.
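A hedged sketch of a mongoDbParam using the fields above; listeningEvent narrows the watched change-stream operations, and all connection details are placeholders.
// Hypothetical fragment; all connection details are placeholders.
const mongoDbParam = {
    resource: "mongo-xxxxxxxx",
    database: "app",
    collection: "events",
    ip: "10.0.0.2",
    port: 27017,
    userName: "mongouser",
    password: "placeholder",
    selfBuilt: false,
    copyExisting: true,                      // also copy stock data
    listeningEvent: "insert,update,delete",  // empty means all event types
};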
CkafkaDatahubTaskTargetResourceMySqlParam, CkafkaDatahubTaskTargetResourceMySqlParamArgs
- Database string
- MySQL database name; * is the whole database.
- Resource string
- MySQL connection Id.
- Table string
- The name of the MySQL data table; * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with commas; each table must be written in the format database name.table name, and a regular expression uses the same database name.table name format.
- DataSourceIncrementColumn string
- The name of the column to be monitored.
- DataSourceIncrementMode string
- TIMESTAMP indicates that the incremental column is of timestamp type; INCREMENT indicates that the incremental column is a self-incrementing id.
- DataSourceMonitorMode string
- TABLE indicates that the read item is a table; QUERY indicates that the read item is a query.
- DataSourceMonitorResource string
- When DataMonitorMode=TABLE, pass in the Table to be read; when DataMonitorMode=QUERY, pass in the query SQL statement to be read.
- DataSourceStartFrom string
- HEAD means copy stock plus incremental data; TAIL means copy only incremental data.
- DataTargetInsertMode string
- INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
- DataTargetPrimaryKeyField string
- When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- DataTargetRecordMappings List<CkafkaDatahubTaskTargetResourceMySqlParamDataTargetRecordMapping>
- Mapping relationship between tables and messages.
- DdlTopic string
- The Topic that stores the MySQL DDL information; if empty, it is not stored by default.
- DropCls CkafkaDatahubTaskTargetResourceMySqlParamDropCls
- When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is invalid.
- DropInvalidMessage bool
- Whether to discard messages that fail to parse; the default is true.
- IncludeContentChanges string
- If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written to the selected topic.
- IncludeQuery bool
- If the value is true, and the binlog_rows_query_log_events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
- IsTablePrefix bool
- True when the Table input is a prefix, otherwise false.
- IsTableRegular bool
- Whether the input table is a regular expression; if both this option and IsTablePrefix are true, this option takes priority over IsTablePrefix.
- KeyColumns string
- Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by semicolons, fields by commas. Tables not specified default to the table's primary key.
- OutputFormat string
- Output format, DEFAULT, CANAL_1, CANAL_2.
- RecordWithSchema bool
- If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
- SignalDatabase string
- Database name of the signal table.
- SnapshotMode string
- Whether to copy inventory information (schema_only does not copy; initial copies the full amount); the default is initial.
- TopicRegex string
- Regular expression for routing events to specific topics; defaults to (.*).
- TopicReplacement string
- TopicRegex, $1, $2.
- Database string
- MySQL database name; * is the whole database.
- Resource string
- MySQL connection Id.
- Table string
- The name of the MySQL data table; * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with commas; each table must be written in the format database name.table name, and a regular expression uses the same database name.table name format.
- DataSourceIncrementColumn string
- The name of the column to be monitored.
- DataSourceIncrementMode string
- TIMESTAMP indicates that the incremental column is of timestamp type; INCREMENT indicates that the incremental column is a self-incrementing id.
- DataSourceMonitorMode string
- TABLE indicates that the read item is a table; QUERY indicates that the read item is a query.
- DataSourceMonitorResource string
- When DataMonitorMode=TABLE, pass in the Table to be read; when DataMonitorMode=QUERY, pass in the query SQL statement to be read.
- DataSourceStartFrom string
- HEAD means copy stock plus incremental data; TAIL means copy only incremental data.
- DataTargetInsertMode string
- INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
- DataTargetPrimaryKeyField string
- When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- DataTargetRecordMappings []CkafkaDatahubTaskTargetResourceMySqlParamDataTargetRecordMapping
- Mapping relationship between tables and messages.
- DdlTopic string
- The Topic that stores the MySQL DDL information; if empty, it is not stored by default.
- DropCls CkafkaDatahubTaskTargetResourceMySqlParamDropCls
- When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is invalid.
- DropInvalidMessage bool
- Whether to discard messages that fail to parse; the default is true.
- IncludeContentChanges string
- If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written to the selected topic.
- IncludeQuery bool
- If the value is true, and the binlog_rows_query_log_events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
- IsTablePrefix bool
- True when the Table input is a prefix, otherwise false.
- IsTableRegular bool
- Whether the input table is a regular expression; if both this option and IsTablePrefix are true, this option takes priority over IsTablePrefix.
- KeyColumns string
- Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by semicolons, fields by commas. Tables not specified default to the table's primary key.
- OutputFormat string
- Output format, DEFAULT, CANAL_1, CANAL_2.
- RecordWithSchema bool
- If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
- SignalDatabase string
- Database name of the signal table.
- SnapshotMode string
- Whether to copy inventory information (schema_only does not copy; initial copies the full amount); the default is initial.
- TopicRegex string
- Regular expression for routing events to specific topics; defaults to (.*).
- TopicReplacement string
- TopicRegex, $1, $2.
- database String
- MySQL database name; * is the whole database.
- resource String
- MySQL connection Id.
- table String
- The name of the MySQL data table; * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with commas; each table must be written in the format database name.table name, and a regular expression uses the same database name.table name format.
- dataSourceIncrementColumn String
- The name of the column to be monitored.
- dataSourceIncrementMode String
- TIMESTAMP indicates that the incremental column is of timestamp type; INCREMENT indicates that the incremental column is a self-incrementing id.
- dataSourceMonitorMode String
- TABLE indicates that the read item is a table; QUERY indicates that the read item is a query.
- dataSourceMonitorResource String
- When DataMonitorMode=TABLE, pass in the Table to be read; when DataMonitorMode=QUERY, pass in the query SQL statement to be read.
- dataSourceStartFrom String
- HEAD means copy stock plus incremental data; TAIL means copy only incremental data.
- dataTargetInsertMode String
- INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
- dataTargetPrimaryKeyField String
- When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- dataTargetRecordMappings List<CkafkaDatahubTaskTargetResourceMySqlParamDataTargetRecordMapping>
- Mapping relationship between tables and messages.
- ddlTopic String
- The Topic that stores the MySQL DDL information; if empty, it is not stored by default.
- dropCls CkafkaDatahubTaskTargetResourceMySqlParamDropCls
- When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is invalid.
- dropInvalidMessage Boolean
- Whether to discard messages that fail to parse; the default is true.
- includeContentChanges String
- If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written to the selected topic.
- includeQuery Boolean
- If the value is true, and the binlog_rows_query_log_events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
- isTablePrefix Boolean
- True when the Table input is a prefix, otherwise false.
- isTableRegular Boolean
- Whether the input table is a regular expression; if both this option and IsTablePrefix are true, this option takes priority over IsTablePrefix.
- keyColumns String
- Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by semicolons, fields by commas. Tables not specified default to the table's primary key.
- outputFormat String
- Output format, DEFAULT, CANAL_1, CANAL_2.
- recordWithSchema Boolean
- If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
- signalDatabase String
- Database name of the signal table.
- snapshotMode String
- Whether to copy inventory information (schema_only does not copy; initial copies the full amount); the default is initial.
- topicRegex String
- Regular expression for routing events to specific topics; defaults to (.*).
- topicReplacement String
- TopicRegex, $1, $2.
- database string
- MySQL database name; * is the whole database.
- resource string
- MySQL connection Id.
- table string
- The name of the MySQL data table; * means all non-system tables in the monitored databases. Multiple tables can be monitored by separating them with commas; each table must be written in the format database name.table name, and a regular expression uses the same database name.table name format.
- dataSourceIncrementColumn string
- The name of the column to be monitored.
- dataSourceIncrementMode string
- TIMESTAMP indicates that the incremental column is of timestamp type; INCREMENT indicates that the incremental column is a self-incrementing id.
- dataSourceMonitorMode string
- TABLE indicates that the read item is a table; QUERY indicates that the read item is a query.
- dataSourceMonitorResource string
- When DataMonitorMode=TABLE, pass in the Table to be read; when DataMonitorMode=QUERY, pass in the query SQL statement to be read.
- dataSourceStartFrom string
- HEAD means copy stock plus incremental data; TAIL means copy only incremental data.
- dataTargetInsertMode string
- INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
- dataTargetPrimaryKeyField string
- When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- dataTargetRecordMappings CkafkaDatahubTaskTargetResourceMySqlParamDataTargetRecordMapping[]
- Mapping relationship between tables and messages.
- ddlTopic string
- The Topic that stores the MySQL DDL information; if empty, it is not stored by default.
- dropCls CkafkaDatahubTaskTargetResourceMySqlParamDropCls
- When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is invalid.
- dropInvalidMessage boolean
- Whether to discard messages that fail to parse; the default is true.
- includeContentChanges string
- If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written to the selected topic.
- includeQuery boolean
- If the value is true, and the binlog_rows_query_log_events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
- isTablePrefix boolean
- True when the Table input is a prefix, otherwise false.
- isTableRegular boolean
- Whether the input table is a regular expression; if both this option and IsTablePrefix are true, this option takes priority over IsTablePrefix.
- keyColumns string
- Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by semicolons, fields by commas. Tables not specified default to the table's primary key.
- outputFormat string
- Output format, DEFAULT, CANAL_1, CANAL_2.
- recordWithSchema boolean
- If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
- signal
Database string - database name of signal table.
- snapshot
Mode string - whether to Copy inventory information (schema_only does not copy, initial full amount), the default is initial.
- topic
Regex string - Regular expression for routing events to specific topics, defaults to (.*).
- topic
Replacement string - TopicRegex, $1, $2.
- database str
- MySQL database name, * is the whole database.
- resource str
- MySQL connection Id.
- table str
- The name of the MySQL data table, is the non-system table in all the monitored databases, which can be separated by, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name, when a regular expression needs to be filled in, the format is data database name.data table name.
- data_
source_ strincrement_ column - the name of the column to be monitored.
- data_
source_ strincrement_ mode - TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
- data_
source_ strmonitor_ mode - TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
- data_
source_ strmonitor_ resource - When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
- data_
source_ strstart_ from - HEAD means copy stock + incremental data, TAIL means copy only incremental data.
- data_
target_ strinsert_ mode - INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- data_
target_ strprimary_ key_ field - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- data_
target_ Sequence[Ckafkarecord_ mappings Datahub Task Target Resource My Sql Param Data Target Record Mapping] - Mapping relationship between tables and messages.
- ddl_
topic str - The Topic that stores the Ddl information of My SQL, if it is empty, it will not be stored by default.
- drop_
cls CkafkaDatahub Task Target Resource My Sql Param Drop Cls - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- drop_
invalid_ boolmessage - Whether to discard messages that fail to parse, the default is true.
- include_
content_ strchanges - If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- include_
query bool - If the value is true, and the value of the binlog rows query log events configuration item in My SQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain Original SQL statement.
- is_
table_ boolprefix - When the Table input is a prefix, the value of this item is true, otherwise it is false.
- is_
table_ boolregular - Whether the input table is a regular expression, if this option and Is Table Prefix are true at the same time, the judgment priority of this option is higher than Is Table Prefix.
- key_
columns str - Format library1.table1 field 1,field 2;library 2.table2 field 2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- output_
format str - output format, DEFAULT, CANAL_1, CANAL_2.
- record_
with_ boolschema - If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- signal_
database str - database name of signal table.
- snapshot_
mode str - whether to Copy inventory information (schema_only does not copy, initial full amount), the default is initial.
- topic_
regex str - Regular expression for routing events to specific topics, defaults to (.*).
- topic_
replacement str - TopicRegex, $1, $2.
- database String
- MySQL database name, * is the whole database.
- resource String
- MySQL connection Id.
- table String
- The name of the MySQL data table, is the non-system table in all the monitored databases, which can be separated by, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name, when a regular expression needs to be filled in, the format is data database name.data table name.
- data
Source StringIncrement Column - the name of the column to be monitored.
- data
Source StringIncrement Mode - TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
- data
Source StringMonitor Mode - TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
- data
Source StringMonitor Resource - When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
- data
Source StringStart From - HEAD means copy stock + incremental data, TAIL means copy only incremental data.
- data
Target StringInsert Mode - INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- data
Target StringPrimary Key Field - When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- data
Target List<Property Map>Record Mappings - Mapping relationship between tables and messages.
- ddl
Topic String - The Topic that stores the Ddl information of My SQL, if it is empty, it will not be stored by default.
- drop
Cls Property Map - When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- drop
Invalid BooleanMessage - Whether to discard messages that fail to parse, the default is true.
- include
Content StringChanges - If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- include
Query Boolean - If the value is true, and the value of the binlog rows query log events configuration item in My SQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain Original SQL statement.
- is
Table BooleanPrefix - When the Table input is a prefix, the value of this item is true, otherwise it is false.
- is
Table BooleanRegular - Whether the input table is a regular expression, if this option and Is Table Prefix are true at the same time, the judgment priority of this option is higher than Is Table Prefix.
- key
Columns String - Format library1.table1 field 1,field 2;library 2.table2 field 2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- output
Format String - output format, DEFAULT, CANAL_1, CANAL_2.
- record
With BooleanSchema - If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- signal
Database String - database name of signal table.
- snapshot
Mode String - whether to Copy inventory information (schema_only does not copy, initial full amount), the default is initial.
- topic
Regex String - Regular expression for routing events to specific topics, defaults to (.*).
- topic
Replacement String - TopicRegex, $1, $2.
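As a quick orientation to how these fields fit together, here is a minimal TypeScript sketch of a task writing topic data into MySQL. It is illustrative only: the task type, connection ID, database, and table names are placeholders and assumptions, not values taken from this reference.

```typescript
import * as tencentcloud from "@pulumi/tencentcloud";

// Hypothetical SINK task delivering topic data into MySQL; every ID below is a placeholder.
const mysqlSink = new tencentcloud.CkafkaDatahubTask("mysqlSink", {
    taskName: "topic-to-mysql",
    taskType: "SINK", // assumed task type for a target-side MySQL delivery
    sourceResource: {
        type: "TOPIC",
        topicParam: {
            resource: "example-source-topic", // placeholder topic name
        },
    },
    targetResource: {
        type: "MYSQL", // assumed discriminator for mySqlParam
        mySqlParam: {
            resource: "resource-xxxxxxxx",   // placeholder MySQL connection ID
            database: "app_db",
            table: "app_db.orders",          // databaseName.tableName format
            dataTargetInsertMode: "UPSERT",
            dataTargetPrimaryKeyField: "id", // primary key the upsert depends on
            dropInvalidMessage: true,
        },
    },
});
```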
CkafkaDatahubTaskTargetResourceMySqlParamDataTargetRecordMapping, CkafkaDatahubTaskTargetResourceMySqlParamDataTargetRecordMappingArgs
- AllowNull bool - Whether the message is allowed to be empty.
- AutoIncrement bool - Whether it is an auto-increment column.
- ColumnName string - Column name.
- ColumnSize string - Current column size.
- DecimalDigits string - Current column decimal digits.
- DefaultValue string - Default value of the database table field.
- ExtraInfo string - Extra fields of the database table.
- JsonKey string - Key name of the message.
- Type string - Message type.
- AllowNull bool - Whether the message is allowed to be empty.
- AutoIncrement bool - Whether it is an auto-increment column.
- ColumnName string - Column name.
- ColumnSize string - Current column size.
- DecimalDigits string - Current column decimal digits.
- DefaultValue string - Default value of the database table field.
- ExtraInfo string - Extra fields of the database table.
- JsonKey string - Key name of the message.
- Type string - Message type.
- allowNull Boolean - Whether the message is allowed to be empty.
- autoIncrement Boolean - Whether it is an auto-increment column.
- columnName String - Column name.
- columnSize String - Current column size.
- decimalDigits String - Current column decimal digits.
- defaultValue String - Default value of the database table field.
- extraInfo String - Extra fields of the database table.
- jsonKey String - Key name of the message.
- type String - Message type.
- allowNull boolean - Whether the message is allowed to be empty.
- autoIncrement boolean - Whether it is an auto-increment column.
- columnName string - Column name.
- columnSize string - Current column size.
- decimalDigits string - Current column decimal digits.
- defaultValue string - Default value of the database table field.
- extraInfo string - Extra fields of the database table.
- jsonKey string - Key name of the message.
- type string - Message type.
- allow_null bool - Whether the message is allowed to be empty.
- auto_increment bool - Whether it is an auto-increment column.
- column_name str - Column name.
- column_size str - Current column size.
- decimal_digits str - Current column decimal digits.
- default_value str - Default value of the database table field.
- extra_info str - Extra fields of the database table.
- json_key str - Key name of the message.
- type str - Message type.
- allowNull Boolean - Whether the message is allowed to be empty.
- autoIncrement Boolean - Whether it is an auto-increment column.
- columnName String - Column name.
- columnSize String - Current column size.
- decimalDigits String - Current column decimal digits.
- defaultValue String - Default value of the database table field.
- extraInfo String - Extra fields of the database table.
- jsonKey String - Key name of the message.
- type String - Message type.
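A record mapping entry pairs one table column with one message key. Below is a minimal, assumed example of a single mapping object in TypeScript; every field value is invented for illustration.

```typescript
// One assumed table-to-message mapping; the column and key names are invented.
const idColumnMapping = {
    columnName: "id",
    jsonKey: "order_id", // message key mapped to the column
    type: "int",         // message type (assumed value)
    allowNull: false,
    autoIncrement: true,
};
// Passed as an element of dataTargetRecordMappings on the MySQL/PostgreSQL param.
```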
CkafkaDatahubTaskTargetResourceMySqlParamDropCls, CkafkaDatahubTaskTargetResourceMySqlParamDropClsArgs
- DropClsLogSet string - CLS logset ID.
- DropClsOwneruin string - CLS account owner UIN.
- DropClsRegion string - Region to which CLS messages are delivered.
- DropClsTopicId string - CLS topic ID.
- DropInvalidMessageToCls bool - Whether to deliver to CLS.
- DropClsLogSet string - CLS logset ID.
- DropClsOwneruin string - CLS account owner UIN.
- DropClsRegion string - Region to which CLS messages are delivered.
- DropClsTopicId string - CLS topic ID.
- DropInvalidMessageToCls bool - Whether to deliver to CLS.
- dropClsLogSet String - CLS logset ID.
- dropClsOwneruin String - CLS account owner UIN.
- dropClsRegion String - Region to which CLS messages are delivered.
- dropClsTopicId String - CLS topic ID.
- dropInvalidMessageToCls Boolean - Whether to deliver to CLS.
- dropClsLogSet string - CLS logset ID.
- dropClsOwneruin string - CLS account owner UIN.
- dropClsRegion string - Region to which CLS messages are delivered.
- dropClsTopicId string - CLS topic ID.
- dropInvalidMessageToCls boolean - Whether to deliver to CLS.
- drop_cls_log_set str - CLS logset ID.
- drop_cls_owneruin str - CLS account owner UIN.
- drop_cls_region str - Region to which CLS messages are delivered.
- drop_cls_topic_id str - CLS topic ID.
- drop_invalid_message_to_cls bool - Whether to deliver to CLS.
- dropClsLogSet String - CLS logset ID.
- dropClsOwneruin String - CLS account owner UIN.
- dropClsRegion String - Region to which CLS messages are delivered.
- dropClsTopicId String - CLS topic ID.
- dropInvalidMessageToCls Boolean - Whether to deliver to CLS.
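The sketch below shows one way these fields might be combined to deliver unparseable messages to CLS instead of silently dropping them; all CLS identifiers are placeholders.

```typescript
// Assumed CLS delivery settings for unparseable messages; all identifiers are placeholders.
const dropClsConfig = {
    dropInvalidMessageToCls: true,   // makes dropInvalidMessage irrelevant
    dropClsRegion: "ap-guangzhou",
    dropClsOwneruin: "100000000001", // account owner UIN
    dropClsLogSet: "logset-xxxxxxxx",
    dropClsTopicId: "topic-xxxxxxxx",
};
```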
CkafkaDatahubTaskTargetResourcePostgreSqlParam, CkafkaDatahubTaskTargetResourcePostgreSqlParamArgs
- Database string - PostgreSQL database name.
- PluginName string - Decoding plugin (decoderbufs/pgoutput). Defaults to decoderbufs.
- Resource string - PostgreSQL connection ID.
- Table string - PostgreSQL table name. * matches all non-system tables in the monitored databases; monitor multiple tables by separating them with commas, each in the format schemaName.tableName. When a regular expression is needed, the format is also schemaName.tableName.
- DataFormat string - Upstream data format (JSON|Debezium). Required when the database synchronization mode matches the default field.
- DataTargetInsertMode string - INSERT uses Insert mode; UPSERT uses Upsert mode.
- DataTargetPrimaryKeyField string - When DataInsertMode=UPSERT, pass in the primary key the upsert depends on.
- DataTargetRecordMappings List<CkafkaDatahubTaskTargetResourcePostgreSqlParamDataTargetRecordMapping> - Mapping relationship between tables and messages.
- DropInvalidMessage bool - Whether to discard messages that fail to parse. Defaults to true.
- IsTableRegular bool - Whether the input table is a regular expression.
- KeyColumns string - Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables not specified default to the table's primary key.
- RecordWithSchema bool - If true, messages carry the schema corresponding to the message structure; if false, they do not.
- SnapshotMode string - never|initial. Defaults to initial.
- Database string - PostgreSQL database name.
- PluginName string - Decoding plugin (decoderbufs/pgoutput). Defaults to decoderbufs.
- Resource string - PostgreSQL connection ID.
- Table string - PostgreSQL table name. * matches all non-system tables in the monitored databases; monitor multiple tables by separating them with commas, each in the format schemaName.tableName. When a regular expression is needed, the format is also schemaName.tableName.
- DataFormat string - Upstream data format (JSON|Debezium). Required when the database synchronization mode matches the default field.
- DataTargetInsertMode string - INSERT uses Insert mode; UPSERT uses Upsert mode.
- DataTargetPrimaryKeyField string - When DataInsertMode=UPSERT, pass in the primary key the upsert depends on.
- DataTargetRecordMappings []CkafkaDatahubTaskTargetResourcePostgreSqlParamDataTargetRecordMapping - Mapping relationship between tables and messages.
- DropInvalidMessage bool - Whether to discard messages that fail to parse. Defaults to true.
- IsTableRegular bool - Whether the input table is a regular expression.
- KeyColumns string - Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables not specified default to the table's primary key.
- RecordWithSchema bool - If true, messages carry the schema corresponding to the message structure; if false, they do not.
- SnapshotMode string - never|initial. Defaults to initial.
- database String - PostgreSQL database name.
- pluginName String - Decoding plugin (decoderbufs/pgoutput). Defaults to decoderbufs.
- resource String - PostgreSQL connection ID.
- table String - PostgreSQL table name. * matches all non-system tables in the monitored databases; monitor multiple tables by separating them with commas, each in the format schemaName.tableName. When a regular expression is needed, the format is also schemaName.tableName.
- dataFormat String - Upstream data format (JSON|Debezium). Required when the database synchronization mode matches the default field.
- dataTargetInsertMode String - INSERT uses Insert mode; UPSERT uses Upsert mode.
- dataTargetPrimaryKeyField String - When DataInsertMode=UPSERT, pass in the primary key the upsert depends on.
- dataTargetRecordMappings List<CkafkaDatahubTaskTargetResourcePostgreSqlParamDataTargetRecordMapping> - Mapping relationship between tables and messages.
- dropInvalidMessage Boolean - Whether to discard messages that fail to parse. Defaults to true.
- isTableRegular Boolean - Whether the input table is a regular expression.
- keyColumns String - Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables not specified default to the table's primary key.
- recordWithSchema Boolean - If true, messages carry the schema corresponding to the message structure; if false, they do not.
- snapshotMode String - never|initial. Defaults to initial.
- database string - PostgreSQL database name.
- pluginName string - Decoding plugin (decoderbufs/pgoutput). Defaults to decoderbufs.
- resource string - PostgreSQL connection ID.
- table string - PostgreSQL table name. * matches all non-system tables in the monitored databases; monitor multiple tables by separating them with commas, each in the format schemaName.tableName. When a regular expression is needed, the format is also schemaName.tableName.
- dataFormat string - Upstream data format (JSON|Debezium). Required when the database synchronization mode matches the default field.
- dataTargetInsertMode string - INSERT uses Insert mode; UPSERT uses Upsert mode.
- dataTargetPrimaryKeyField string - When DataInsertMode=UPSERT, pass in the primary key the upsert depends on.
- dataTargetRecordMappings CkafkaDatahubTaskTargetResourcePostgreSqlParamDataTargetRecordMapping[] - Mapping relationship between tables and messages.
- dropInvalidMessage boolean - Whether to discard messages that fail to parse. Defaults to true.
- isTableRegular boolean - Whether the input table is a regular expression.
- keyColumns string - Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables not specified default to the table's primary key.
- recordWithSchema boolean - If true, messages carry the schema corresponding to the message structure; if false, they do not.
- snapshotMode string - never|initial. Defaults to initial.
- database str - PostgreSQL database name.
- plugin_name str - Decoding plugin (decoderbufs/pgoutput). Defaults to decoderbufs.
- resource str - PostgreSQL connection ID.
- table str - PostgreSQL table name. * matches all non-system tables in the monitored databases; monitor multiple tables by separating them with commas, each in the format schemaName.tableName. When a regular expression is needed, the format is also schemaName.tableName.
- data_format str - Upstream data format (JSON|Debezium). Required when the database synchronization mode matches the default field.
- data_target_insert_mode str - INSERT uses Insert mode; UPSERT uses Upsert mode.
- data_target_primary_key_field str - When DataInsertMode=UPSERT, pass in the primary key the upsert depends on.
- data_target_record_mappings Sequence[CkafkaDatahubTaskTargetResourcePostgreSqlParamDataTargetRecordMapping] - Mapping relationship between tables and messages.
- drop_invalid_message bool - Whether to discard messages that fail to parse. Defaults to true.
- is_table_regular bool - Whether the input table is a regular expression.
- key_columns str - Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables not specified default to the table's primary key.
- record_with_schema bool - If true, messages carry the schema corresponding to the message structure; if false, they do not.
- snapshot_mode str - never|initial. Defaults to initial.
- database String - PostgreSQL database name.
- pluginName String - Decoding plugin (decoderbufs/pgoutput). Defaults to decoderbufs.
- resource String - PostgreSQL connection ID.
- table String - PostgreSQL table name. * matches all non-system tables in the monitored databases; monitor multiple tables by separating them with commas, each in the format schemaName.tableName. When a regular expression is needed, the format is also schemaName.tableName.
- dataFormat String - Upstream data format (JSON|Debezium). Required when the database synchronization mode matches the default field.
- dataTargetInsertMode String - INSERT uses Insert mode; UPSERT uses Upsert mode.
- dataTargetPrimaryKeyField String - When DataInsertMode=UPSERT, pass in the primary key the upsert depends on.
- dataTargetRecordMappings List<Property Map> - Mapping relationship between tables and messages.
- dropInvalidMessage Boolean - Whether to discard messages that fail to parse. Defaults to true.
- isTableRegular Boolean - Whether the input table is a regular expression.
- keyColumns String - Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables not specified default to the table's primary key.
- recordWithSchema Boolean - If true, messages carry the schema corresponding to the message structure; if false, they do not.
- snapshotMode String - never|initial. Defaults to initial.
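A minimal TypeScript sketch of a PostgreSQL target fragment, under the assumption that the type discriminator is POSTGRESQL as in the example at the top of this page; the connection ID and table names are placeholders.

```typescript
// Assumed PostgreSQL target fragment; connection ID and names are placeholders.
const postgresTarget = {
    type: "POSTGRESQL",
    postgreSqlParam: {
        resource: "resource-xxxxxxxx",
        database: "app_db",
        table: "public.orders",  // schemaName.tableName format
        pluginName: "pgoutput",  // or decoderbufs, the default
        dataTargetInsertMode: "UPSERT",
        dataTargetPrimaryKeyField: "id",
        snapshotMode: "initial",
    },
};
```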
CkafkaDatahubTaskTargetResourcePostgreSqlParamDataTargetRecordMapping, CkafkaDatahubTaskTargetResourcePostgreSqlParamDataTargetRecordMappingArgs
- AllowNull bool - Whether the message is allowed to be empty.
- AutoIncrement bool - Whether it is an auto-increment column.
- ColumnName string - Column name.
- ColumnSize string - Current column size.
- DecimalDigits string - Current column decimal digits.
- DefaultValue string - Default value of the database table field.
- ExtraInfo string - Extra fields of the database table.
- JsonKey string - Key name of the message.
- Type string - Message type.
- AllowNull bool - Whether the message is allowed to be empty.
- AutoIncrement bool - Whether it is an auto-increment column.
- ColumnName string - Column name.
- ColumnSize string - Current column size.
- DecimalDigits string - Current column decimal digits.
- DefaultValue string - Default value of the database table field.
- ExtraInfo string - Extra fields of the database table.
- JsonKey string - Key name of the message.
- Type string - Message type.
- allowNull Boolean - Whether the message is allowed to be empty.
- autoIncrement Boolean - Whether it is an auto-increment column.
- columnName String - Column name.
- columnSize String - Current column size.
- decimalDigits String - Current column decimal digits.
- defaultValue String - Default value of the database table field.
- extraInfo String - Extra fields of the database table.
- jsonKey String - Key name of the message.
- type String - Message type.
- allowNull boolean - Whether the message is allowed to be empty.
- autoIncrement boolean - Whether it is an auto-increment column.
- columnName string - Column name.
- columnSize string - Current column size.
- decimalDigits string - Current column decimal digits.
- defaultValue string - Default value of the database table field.
- extraInfo string - Extra fields of the database table.
- jsonKey string - Key name of the message.
- type string - Message type.
- allow_null bool - Whether the message is allowed to be empty.
- auto_increment bool - Whether it is an auto-increment column.
- column_name str - Column name.
- column_size str - Current column size.
- decimal_digits str - Current column decimal digits.
- default_value str - Default value of the database table field.
- extra_info str - Extra fields of the database table.
- json_key str - Key name of the message.
- type str - Message type.
- allowNull Boolean - Whether the message is allowed to be empty.
- autoIncrement Boolean - Whether it is an auto-increment column.
- columnName String - Column name.
- columnSize String - Current column size.
- decimalDigits String - Current column decimal digits.
- defaultValue String - Default value of the database table field.
- extraInfo String - Extra fields of the database table.
- jsonKey String - Key name of the message.
- type String - Message type.
CkafkaDatahubTaskTargetResourceScfParam, CkafkaDatahubTaskTargetResourceScfParamArgs
- FunctionName string - SCF function name.
- BatchSize double - Maximum number of messages sent per batch. Defaults to 1000.
- MaxRetries double - Number of retries after an SCF call fails. Defaults to 5.
- Namespace string - SCF cloud function namespace. Defaults to default.
- Qualifier string - SCF cloud function version or alias. Defaults to DEFAULT.
- FunctionName string - SCF function name.
- BatchSize float64 - Maximum number of messages sent per batch. Defaults to 1000.
- MaxRetries float64 - Number of retries after an SCF call fails. Defaults to 5.
- Namespace string - SCF cloud function namespace. Defaults to default.
- Qualifier string - SCF cloud function version or alias. Defaults to DEFAULT.
- functionName String - SCF function name.
- batchSize Double - Maximum number of messages sent per batch. Defaults to 1000.
- maxRetries Double - Number of retries after an SCF call fails. Defaults to 5.
- namespace String - SCF cloud function namespace. Defaults to default.
- qualifier String - SCF cloud function version or alias. Defaults to DEFAULT.
- functionName string - SCF function name.
- batchSize number - Maximum number of messages sent per batch. Defaults to 1000.
- maxRetries number - Number of retries after an SCF call fails. Defaults to 5.
- namespace string - SCF cloud function namespace. Defaults to default.
- qualifier string - SCF cloud function version or alias. Defaults to DEFAULT.
- function_name str - SCF function name.
- batch_size float - Maximum number of messages sent per batch. Defaults to 1000.
- max_retries float - Number of retries after an SCF call fails. Defaults to 5.
- namespace str - SCF cloud function namespace. Defaults to default.
- qualifier str - SCF cloud function version or alias. Defaults to DEFAULT.
- functionName String - SCF function name.
- batchSize Number - Maximum number of messages sent per batch. Defaults to 1000.
- maxRetries Number - Number of retries after an SCF call fails. Defaults to 5.
- namespace String - SCF cloud function namespace. Defaults to default.
- qualifier String - SCF cloud function version or alias. Defaults to DEFAULT.
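A minimal, assumed sketch of an SCF target fragment in TypeScript. The type value "SCF" and the function name are assumptions for illustration; the batch size and retry count repeat the documented defaults.

```typescript
// Assumed SCF target fragment; the type value and function name are illustrative.
const scfTarget = {
    type: "SCF",
    scfParam: {
        functionName: "consume-ckafka-messages", // placeholder function name
        namespace: "default",
        qualifier: "DEFAULT",
        batchSize: 1000, // documented default
        maxRetries: 5,   // documented default
    },
};
```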
CkafkaDatahubTaskTargetResourceSqlServerParam, CkafkaDatahubTaskTargetResourceSqlServerParamArgs
- Database string - SQL Server database name.
- Resource string - SQL Server connection ID.
- Table string - SQL Server table name. * matches all non-system tables in the monitored databases; monitor multiple tables by separating them with commas, each in the format databaseName.tableName.
- SnapshotMode string - schema_only|initial. Defaults to initial.
- Database string - SQL Server database name.
- Resource string - SQL Server connection ID.
- Table string - SQL Server table name. * matches all non-system tables in the monitored databases; monitor multiple tables by separating them with commas, each in the format databaseName.tableName.
- SnapshotMode string - schema_only|initial. Defaults to initial.
- database String - SQL Server database name.
- resource String - SQL Server connection ID.
- table String - SQL Server table name. * matches all non-system tables in the monitored databases; monitor multiple tables by separating them with commas, each in the format databaseName.tableName.
- snapshotMode String - schema_only|initial. Defaults to initial.
- database string - SQL Server database name.
- resource string - SQL Server connection ID.
- table string - SQL Server table name. * matches all non-system tables in the monitored databases; monitor multiple tables by separating them with commas, each in the format databaseName.tableName.
- snapshotMode string - schema_only|initial. Defaults to initial.
- database str - SQL Server database name.
- resource str - SQL Server connection ID.
- table str - SQL Server table name. * matches all non-system tables in the monitored databases; monitor multiple tables by separating them with commas, each in the format databaseName.tableName.
- snapshot_mode str - schema_only|initial. Defaults to initial.
- database String - SQL Server database name.
- resource String - SQL Server connection ID.
- table String - SQL Server table name. * matches all non-system tables in the monitored databases; monitor multiple tables by separating them with commas, each in the format databaseName.tableName.
- snapshotMode String - schema_only|initial. Defaults to initial.
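A minimal, assumed TypeScript sketch of a SQL Server parameter object; the connection ID and names are placeholders.

```typescript
// Assumed SQL Server parameter object; connection ID and names are placeholders.
const sqlServerParam = {
    resource: "resource-xxxxxxxx",
    database: "app_db",
    table: "app_db.orders",  // databaseName.tableName format
    snapshotMode: "initial", // or schema_only to skip existing rows
};
```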
CkafkaDatahubTaskTargetResourceTdwParam, CkafkaDatahubTaskTargetResourceTdwParamArgs
- Bid string - TDW bid.
- Tid string - TDW tid.
- IsDomestic bool - Defaults to true.
- TdwHost string - TDW address. Defaults to tl-tdbank-tdmanager.tencent-distribute.com.
- TdwPort double - TDW port. Defaults to 8099.
- Bid string - TDW bid.
- Tid string - TDW tid.
- IsDomestic bool - Defaults to true.
- TdwHost string - TDW address. Defaults to tl-tdbank-tdmanager.tencent-distribute.com.
- TdwPort float64 - TDW port. Defaults to 8099.
- bid String - TDW bid.
- tid String - TDW tid.
- isDomestic Boolean - Defaults to true.
- tdwHost String - TDW address. Defaults to tl-tdbank-tdmanager.tencent-distribute.com.
- tdwPort Double - TDW port. Defaults to 8099.
- bid string - TDW bid.
- tid string - TDW tid.
- isDomestic boolean - Defaults to true.
- tdwHost string - TDW address. Defaults to tl-tdbank-tdmanager.tencent-distribute.com.
- tdwPort number - TDW port. Defaults to 8099.
- bid str - TDW bid.
- tid str - TDW tid.
- is_domestic bool - Defaults to true.
- tdw_host str - TDW address. Defaults to tl-tdbank-tdmanager.tencent-distribute.com.
- tdw_port float - TDW port. Defaults to 8099.
- bid String - TDW bid.
- tid String - TDW tid.
- isDomestic Boolean - Defaults to true.
- tdwHost String - TDW address. Defaults to tl-tdbank-tdmanager.tencent-distribute.com.
- tdwPort Number - TDW port. Defaults to 8099.
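A minimal, assumed TypeScript sketch of a TDW parameter object; bid and tid are placeholders, while the host and port repeat the documented defaults.

```typescript
// Assumed TDW parameter object; bid and tid are placeholders.
const tdwParam = {
    bid: "tdw-bid-placeholder",
    tid: "tdw-tid-placeholder",
    isDomestic: true,                                      // documented default
    tdwHost: "tl-tdbank-tdmanager.tencent-distribute.com", // documented default
    tdwPort: 8099,                                         // documented default
};
```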
CkafkaDatahubTaskTargetResourceTopicParam, CkafkaDatahubTaskTargetResourceTopicParamArgs
- Resource string - The topic name of the topic sold separately.
- CompressionType string - Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
- MsgMultiple double - Each source topic message is amplified MsgMultiple times when written to the target topic (currently only applies to CKafka-to-CKafka flows).
- OffsetType string - Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
- StartTime double - Required when OffsetType is timestamp; pass a timestamp accurate to the second.
- TopicId string - Topic ID.
- UseAutoCreateTopic bool - Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
- Resource string - The topic name of the topic sold separately.
- CompressionType string - Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
- MsgMultiple float64 - Each source topic message is amplified MsgMultiple times when written to the target topic (currently only applies to CKafka-to-CKafka flows).
- OffsetType string - Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
- StartTime float64 - Required when OffsetType is timestamp; pass a timestamp accurate to the second.
- TopicId string - Topic ID.
- UseAutoCreateTopic bool - Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
- resource String - The topic name of the topic sold separately.
- compressionType String - Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
- msgMultiple Double - Each source topic message is amplified MsgMultiple times when written to the target topic (currently only applies to CKafka-to-CKafka flows).
- offsetType String - Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
- startTime Double - Required when OffsetType is timestamp; pass a timestamp accurate to the second.
- topicId String - Topic ID.
- useAutoCreateTopic Boolean - Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
- resource string - The topic name of the topic sold separately.
- compressionType string - Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
- msgMultiple number - Each source topic message is amplified MsgMultiple times when written to the target topic (currently only applies to CKafka-to-CKafka flows).
- offsetType string - Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
- startTime number - Required when OffsetType is timestamp; pass a timestamp accurate to the second.
- topicId string - Topic ID.
- useAutoCreateTopic boolean - Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
- resource str - The topic name of the topic sold separately.
- compression_type str - Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
- msg_multiple float - Each source topic message is amplified MsgMultiple times when written to the target topic (currently only applies to CKafka-to-CKafka flows).
- offset_type str - Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
- start_time float - Required when OffsetType is timestamp; pass a timestamp accurate to the second.
- topic_id str - Topic ID.
- use_auto_create_topic bool - Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
- resource String - The topic name of the topic sold separately.
- compressionType String - Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
- msgMultiple Number - Each source topic message is amplified MsgMultiple times when written to the target topic (currently only applies to CKafka-to-CKafka flows).
- offsetType String - Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
- startTime Number - Required when OffsetType is timestamp; pass a timestamp accurate to the second.
- topicId String - Topic ID.
- useAutoCreateTopic Boolean - Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
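A minimal TypeScript sketch of a topic target that starts reading from a point in time; the topic name is a placeholder and the type discriminator follows the TOPIC convention used elsewhere on this page.

```typescript
// Assumed topic target starting from a point in time; the topic name is a placeholder.
const topicTarget = {
    type: "TOPIC",
    topicParam: {
        resource: "example-target-topic",
        offsetType: "timestamp",
        startTime: 1700000000,  // Unix seconds; required when offsetType is timestamp
        compressionType: "lz4", // none, gzip, snappy, or lz4
        useAutoCreateTopic: false,
    },
};
```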
CkafkaDatahubTaskTransformParam, CkafkaDatahubTaskTransformParamArgs
- AnalysisFormat string - Parsing format: JSON | DELIMITER | REGULAR.
- Content string - Raw data.
- FailureParam CkafkaDatahubTaskTransformParamFailureParam - Whether to keep data that fails to parse.
- OutputFormat string - Output format.
- SourceType string - Data source: TOPIC pulls from the source topic; CUSTOMIZE is custom.
- AnalyseResults List<CkafkaDatahubTaskTransformParamAnalyseResult> - Analysis results.
- FilterParams List<CkafkaDatahubTaskTransformParamFilterParam> - Filter.
- MapParams List<CkafkaDatahubTaskTransformParamMapParam> - Map.
- Regex string - Delimiter or regular expression.
- Result string - Test results.
- UseEventBus bool - Whether the underlying engine uses eb.
- AnalysisFormat string - Parsing format: JSON | DELIMITER | REGULAR.
- Content string - Raw data.
- FailureParam CkafkaDatahubTaskTransformParamFailureParam - Whether to keep data that fails to parse.
- OutputFormat string - Output format.
- SourceType string - Data source: TOPIC pulls from the source topic; CUSTOMIZE is custom.
- AnalyseResults []CkafkaDatahubTaskTransformParamAnalyseResult - Analysis results.
- FilterParams []CkafkaDatahubTaskTransformParamFilterParam - Filter.
- MapParams []CkafkaDatahubTaskTransformParamMapParam - Map.
- Regex string - Delimiter or regular expression.
- Result string - Test results.
- UseEventBus bool - Whether the underlying engine uses eb.
- analysisFormat String - Parsing format: JSON | DELIMITER | REGULAR.
- content String - Raw data.
- failureParam CkafkaDatahubTaskTransformParamFailureParam - Whether to keep data that fails to parse.
- outputFormat String - Output format.
- sourceType String - Data source: TOPIC pulls from the source topic; CUSTOMIZE is custom.
- analyseResults List<CkafkaDatahubTaskTransformParamAnalyseResult> - Analysis results.
- filterParams List<CkafkaDatahubTaskTransformParamFilterParam> - Filter.
- mapParams List<CkafkaDatahubTaskTransformParamMapParam> - Map.
- regex String - Delimiter or regular expression.
- result String - Test results.
- useEventBus Boolean - Whether the underlying engine uses eb.
- analysisFormat string - Parsing format: JSON | DELIMITER | REGULAR.
- content string - Raw data.
- failureParam CkafkaDatahubTaskTransformParamFailureParam - Whether to keep data that fails to parse.
- outputFormat string - Output format.
- sourceType string - Data source: TOPIC pulls from the source topic; CUSTOMIZE is custom.
- analyseResults CkafkaDatahubTaskTransformParamAnalyseResult[] - Analysis results.
- filterParams CkafkaDatahubTaskTransformParamFilterParam[] - Filter.
- mapParams CkafkaDatahubTaskTransformParamMapParam[] - Map.
- regex string - Delimiter or regular expression.
- result string - Test results.
- useEventBus boolean - Whether the underlying engine uses eb.
- analysis_format str - Parsing format: JSON | DELIMITER | REGULAR.
- content str - Raw data.
- failure_param CkafkaDatahubTaskTransformParamFailureParam - Whether to keep data that fails to parse.
- output_format str - Output format.
- source_type str - Data source: TOPIC pulls from the source topic; CUSTOMIZE is custom.
- analyse_results Sequence[CkafkaDatahubTaskTransformParamAnalyseResult] - Analysis results.
- filter_params Sequence[CkafkaDatahubTaskTransformParamFilterParam] - Filter.
- map_params Sequence[CkafkaDatahubTaskTransformParamMapParam] - Map.
- regex str - Delimiter or regular expression.
- result str - Test results.
- use_event_bus bool - Whether the underlying engine uses eb.
- analysisFormat String - Parsing format: JSON | DELIMITER | REGULAR.
- content String - Raw data.
- failureParam Property Map - Whether to keep data that fails to parse.
- outputFormat String - Output format.
- sourceType String - Data source: TOPIC pulls from the source topic; CUSTOMIZE is custom.
- analyseResults List<Property Map> - Analysis results.
- filterParams List<Property Map> - Filter.
- mapParams List<Property Map> - Map.
- regex String - Delimiter or regular expression.
- result String - Test results.
- useEventBus Boolean - Whether the underlying engine uses eb.
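A minimal, assumed TypeScript sketch of a transform step that parses JSON records and routes parse failures to a dead letter queue; all field values are invented for illustration.

```typescript
// Assumed transform step parsing JSON records; all field values are invented.
const transformParam = {
    analysisFormat: "JSON",
    sourceType: "TOPIC",                        // pull records from the source topic
    content: JSON.stringify({ level: "info" }), // sample raw record for testing
    outputFormat: "JSON",
    failureParam: {
        type: "DLQ",      // keep parse failures in a dead letter queue
        dlqType: "TOPIC",
    },
};
```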
CkafkaDatahubTaskTransformParamAnalyseResult, CkafkaDatahubTaskTransformParamAnalyseResultArgs
CkafkaDatahubTaskTransformParamFailureParam, CkafkaDatahubTaskTransformParamFailureParamArgs
- Type string - Type: DLQ dead letter queue, IGNORE_ERROR | DROP.
- DlqType string - DLQ type: CKAFKA | TOPIC.
- KafkaParam CkafkaDatahubTaskTransformParamFailureParamKafkaParam - CKafka-type dead letter queue.
- MaxRetryAttempts double - Number of retries.
- RetryInterval double - Retry interval.
- TopicParam CkafkaDatahubTaskTransformParamFailureParamTopicParam - DIP topic-type dead letter queue.
- Type string - Type: DLQ dead letter queue, IGNORE_ERROR | DROP.
- DlqType string - DLQ type: CKAFKA | TOPIC.
- KafkaParam CkafkaDatahubTaskTransformParamFailureParamKafkaParam - CKafka-type dead letter queue.
- MaxRetryAttempts float64 - Number of retries.
- RetryInterval float64 - Retry interval.
- TopicParam CkafkaDatahubTaskTransformParamFailureParamTopicParam - DIP topic-type dead letter queue.
- type String - Type: DLQ dead letter queue, IGNORE_ERROR | DROP.
- dlqType String - DLQ type: CKAFKA | TOPIC.
- kafkaParam CkafkaDatahubTaskTransformParamFailureParamKafkaParam - CKafka-type dead letter queue.
- maxRetryAttempts Double - Number of retries.
- retryInterval Double - Retry interval.
- topicParam CkafkaDatahubTaskTransformParamFailureParamTopicParam - DIP topic-type dead letter queue.
- type string - Type: DLQ dead letter queue, IGNORE_ERROR | DROP.
- dlqType string - DLQ type: CKAFKA | TOPIC.
- kafkaParam CkafkaDatahubTaskTransformParamFailureParamKafkaParam - CKafka-type dead letter queue.
- maxRetryAttempts number - Number of retries.
- retryInterval number - Retry interval.
- topicParam CkafkaDatahubTaskTransformParamFailureParamTopicParam - DIP topic-type dead letter queue.
- type str - Type: DLQ dead letter queue, IGNORE_ERROR | DROP.
- dlq_type str - DLQ type: CKAFKA | TOPIC.
- kafka_param CkafkaDatahubTaskTransformParamFailureParamKafkaParam - CKafka-type dead letter queue.
- max_retry_attempts float - Number of retries.
- retry_interval float - Retry interval.
- topic_param CkafkaDatahubTaskTransformParamFailureParamTopicParam - DIP topic-type dead letter queue.
- type String - Type: DLQ dead letter queue, IGNORE_ERROR | DROP.
- dlqType String - DLQ type: CKAFKA | TOPIC.
- kafkaParam Property Map - CKafka-type dead letter queue.
- maxRetryAttempts Number - Number of retries.
- retryInterval Number - Retry interval.
- topicParam Property Map - DIP topic-type dead letter queue.
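A minimal, assumed TypeScript sketch of a CKafka-backed dead letter configuration; the instance and topic identifiers are placeholders, and the retry interval's unit is an assumption.

```typescript
// Assumed CKafka-backed dead letter configuration; identifiers are placeholders.
const failureParam = {
    type: "DLQ",
    dlqType: "CKAFKA",
    maxRetryAttempts: 5,
    retryInterval: 60, // unit assumed to be seconds
    kafkaParam: {
        resource: "ckafka-xxxxxxxx", // placeholder instance resource
        topic: "dead-letter-topic",
        offsetType: "earliest",
        selfBuilt: false,
    },
};
```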
CkafkaDatahubTaskTransformParamFailureParamKafkaParam, CkafkaDatahubTaskTransformParamFailureParamKafkaParamArgs
- Resource string - Instance resource.
- SelfBuilt bool - Whether the cluster is self-built rather than a cloud product.
- CompressionType string - Whether to compress when writing to the topic: fill in none to disable, or open to enable.
- EnableToleration bool - Enable the dead letter queue.
- MsgMultiple double - Each source topic message is amplified MsgMultiple times when written to the target topic (currently only applies to CKafka-to-CKafka flows).
- OffsetType string - Offset type: earliest from the beginning, latest from the latest position, timestamp from a specific time.
- PartitionNum double - Number of partitions of the topic.
- QpsLimit double - QPS (queries per second) limit.
- ResourceName string - Instance name.
- StartTime double - Required when OffsetType is timestamp.
- TableMappings List<CkafkaDatahubTaskTransformParamFailureParamKafkaParamTableMapping> - Table-to-topic mappings; required when distributing to multiple topics.
- Topic string - Topic name.
- TopicId string - Topic ID.
- UseAutoCreateTopic bool - Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the topic name to be auto-created in the Topic field).
- UseTableMapping bool - Whether to use multi-table mapping.
- ZoneId double - Zone ID.
- Resource string - Instance resource.
- SelfBuilt bool - Whether the cluster is self-built rather than a cloud product.
- CompressionType string - Whether to compress when writing to the topic: fill in none to disable, or open to enable.
- EnableToleration bool - Enable the dead letter queue.
- MsgMultiple float64 - Each source topic message is amplified MsgMultiple times when written to the target topic (currently only applies to CKafka-to-CKafka flows).
- OffsetType string - Offset type: earliest from the beginning, latest from the latest position, timestamp from a specific time.
- PartitionNum float64 - Number of partitions of the topic.
- QpsLimit float64 - QPS (queries per second) limit.
- ResourceName string - Instance name.
- StartTime float64 - Required when OffsetType is timestamp.
- TableMappings []CkafkaDatahubTaskTransformParamFailureParamKafkaParamTableMapping - Table-to-topic mappings; required when distributing to multiple topics.
- Topic string - Topic name.
- TopicId string - Topic ID.
- UseAutoCreateTopic bool - Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the topic name to be auto-created in the Topic field).
- UseTableMapping bool - Whether to use multi-table mapping.
- ZoneId float64 - Zone ID.
- resource String - Instance resource.
- selfBuilt Boolean - Whether the cluster is self-built rather than a cloud product.
- compressionType String - Whether to compress when writing to the topic: fill in none to disable, or open to enable.
- enableToleration Boolean - Enable the dead letter queue.
- msgMultiple Double - Each source topic message is amplified MsgMultiple times when written to the target topic (currently only applies to CKafka-to-CKafka flows).
- offsetType String - Offset type: earliest from the beginning, latest from the latest position, timestamp from a specific time.
- partitionNum Double - Number of partitions of the topic.
- qpsLimit Double - QPS (queries per second) limit.
- resourceName String - Instance name.
- startTime Double - Required when OffsetType is timestamp.
- tableMappings List<CkafkaDatahubTaskTransformParamFailureParamKafkaParamTableMapping> - Table-to-topic mappings; required when distributing to multiple topics.
- topic String - Topic name.
- topicId String - Topic ID.
- useAutoCreateTopic Boolean - Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the topic name to be auto-created in the Topic field).
- useTableMapping Boolean - Whether to use multi-table mapping.
- zoneId Double - Zone ID.
- resource string - Instance resource.
- selfBuilt boolean - Whether the cluster is self-built rather than a cloud product.
- compressionType string - Whether to compress when writing to the topic: fill in none to disable, or open to enable.
- enableToleration boolean - Enable the dead letter queue.
- msgMultiple number - Each source topic message is amplified MsgMultiple times when written to the target topic (currently only applies to CKafka-to-CKafka flows).
- offsetType string - Offset type: earliest from the beginning, latest from the latest position, timestamp from a specific time.
- partitionNum number - Number of partitions of the topic.
- qpsLimit number - QPS (queries per second) limit.
- resourceName string - Instance name.
- startTime number - Required when OffsetType is timestamp.
- tableMappings CkafkaDatahubTaskTransformParamFailureParamKafkaParamTableMapping[] - Table-to-topic mappings; required when distributing to multiple topics.
- topic string - Topic name.
- topicId string - Topic ID.
- useAutoCreateTopic boolean - Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the topic name to be auto-created in the Topic field).
- useTableMapping boolean - Whether to use multi-table mapping.
- zoneId number - Zone ID.
- resource str - Instance resource.
- self_built bool - Whether the cluster is self-built rather than a cloud product.
- compression_type str - Whether to compress when writing to the topic: fill in none to disable, or open to enable.
- enable_toleration bool - Enable the dead letter queue.
- msg_multiple float - Each source topic message is amplified MsgMultiple times when written to the target topic (currently only applies to CKafka-to-CKafka flows).
- offset_type str - Offset type: earliest from the beginning, latest from the latest position, timestamp from a specific time.
- partition_num float - Number of partitions of the topic.
- qps_limit float - QPS (queries per second) limit.
- resource_name str - Instance name.
- start_time float - Required when OffsetType is timestamp.
- table_mappings Sequence[CkafkaDatahubTaskTransformParamFailureParamKafkaParamTableMapping] - Table-to-topic mappings; required when distributing to multiple topics.
- topic str - Topic name.
- topic_id str - Topic ID.
- use_auto_create_topic bool - Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the topic name to be auto-created in the Topic field).
- use_table_mapping bool - Whether to use multi-table mapping.
- zone_id float - Zone ID.
- resource String - Instance resource.
- selfBuilt Boolean - Whether the cluster is self-built rather than a cloud product.
- compressionType String - Whether to compress when writing to the topic: fill in none to disable, or open to enable.
- enableToleration Boolean - Enable the dead letter queue.
- msgMultiple Number - Each source topic message is amplified MsgMultiple times when written to the target topic (currently only applies to CKafka-to-CKafka flows).
- offsetType String - Offset type: earliest from the beginning, latest from the latest position, timestamp from a specific time.
- partitionNum Number - Number of partitions of the topic.
- qpsLimit Number - QPS (queries per second) limit.
- resourceName String - Instance name.
- startTime Number - Required when OffsetType is timestamp.
- tableMappings List<Property Map> - Table-to-topic mappings; required when distributing to multiple topics.
- topic String - Topic name.
- topicId String - Topic ID.
- useAutoCreateTopic Boolean - Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the topic name to be auto-created in the Topic field).
- useTableMapping Boolean - Whether to use multi-table mapping.
- zoneId Number - Zone ID.
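A minimal, assumed TypeScript sketch of the Kafka parameter with an auto-created topic; all identifiers are placeholders.

```typescript
// Assumed Kafka param with an auto-created topic; identifiers are placeholders.
const kafkaParam = {
    resource: "ckafka-xxxxxxxx",
    selfBuilt: false,
    useAutoCreateTopic: true,
    topic: "auto-created-topic", // name to auto-create when not using table mapping
    partitionNum: 3,
    qpsLimit: 1000,
};
```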
CkafkaDatahubTaskTransformParamFailureParamKafkaParamTableMapping, CkafkaDatahubTaskTransformParamFailureParamKafkaParamTableMappingArgs
CkafkaDatahubTaskTransformParamFailureParamTopicParam, CkafkaDatahubTaskTransformParamFailureParamTopicParamArgs
- Resource string - The topic name of the topic sold separately.
- CompressionType string - Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
- MsgMultiple double - Each source topic message is amplified MsgMultiple times when written to the target topic (currently only applies to CKafka-to-CKafka flows).
- OffsetType string - Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
- StartTime double - Required when OffsetType is timestamp; pass a timestamp accurate to the second.
- TopicId string - Topic ID.
- UseAutoCreateTopic bool - Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
- Resource string - The topic name of the topic sold separately.
- CompressionType string - Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
- MsgMultiple float64 - Each source topic message is amplified MsgMultiple times when written to the target topic (currently only applies to CKafka-to-CKafka flows).
- OffsetType string - Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
- StartTime float64 - Required when OffsetType is timestamp; pass a timestamp accurate to the second.
- TopicId string - Topic ID.
- UseAutoCreateTopic bool - Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
- resource String - The topic name of the topic sold separately.
- compressionType String - Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
- msgMultiple Double - Each source topic message is amplified MsgMultiple times when written to the target topic (currently only applies to CKafka-to-CKafka flows).
- offsetType String - Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
- startTime Double - Required when OffsetType is timestamp; pass a timestamp accurate to the second.
- topicId String - Topic ID.
- useAutoCreateTopic Boolean - Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
- resource string - The topic name of the topic sold separately.
- compressionType string - Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
- msgMultiple number - Each source topic message is amplified MsgMultiple times when written to the target topic (currently only applies to CKafka-to-CKafka flows).
- offsetType string - Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
- startTime number - Required when OffsetType is timestamp; pass a timestamp accurate to the second.
- topicId string - Topic ID.
- useAutoCreateTopic boolean - Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
- resource str - The topic name of the topic sold separately.
- compression_type str - Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
- msg_multiple float - Each source topic message is amplified MsgMultiple times when written to the target topic (currently only applies to CKafka-to-CKafka flows).
- offset_type str - Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
- start_time float - Required when OffsetType is timestamp; pass a timestamp accurate to the second.
- topic_id str - Topic ID.
- use_auto_create_topic bool - Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
- resource String - The topic name of the topic sold separately.
- compressionType String - Whether to compress when writing to the topic: fill in none to disable, or one of gzip, snappy, or lz4 to enable.
- msgMultiple Number - Each source topic message is amplified MsgMultiple times when written to the target topic (currently only applies to CKafka-to-CKafka flows).
- offsetType String - Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
- startTime Number - Required when OffsetType is timestamp; pass a timestamp accurate to the second.
- topicId String - Topic ID.
- useAutoCreateTopic Boolean - Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
CkafkaDatahubTaskTransformParamFilterParam, CkafkaDatahubTaskTransformParamFilterParamArgs
- key str - Key.
- match_mode str - Matching mode: PREFIX (prefix match), SUFFIX (suffix match), CONTAINS (contains match), EXCEPT (exclusion match), NUMBER (numeric match), IP (IP match).
- value str - Value.
- type str - REGULAR.
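In the TypeScript SDK these nested types are plain object literals. As a minimal sketch (the key name and value below are hypothetical, chosen only to illustrate the fields documented above):

```typescript
// Hypothetical filter entry: keep only messages whose "level" key starts with "ERROR".
const errorOnlyFilter = {
    key: "level",        // key to inspect (illustrative)
    matchMode: "PREFIX", // PREFIX | SUFFIX | CONTAINS | EXCEPT | NUMBER | IP
    value: "ERROR",      // value to match against
    type: "REGULAR",
};
```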
CkafkaDatahubTaskTransformParamMapParam, CkafkaDatahubTaskTransformParamMapParamArgs
CkafkaDatahubTaskTransformsParam, CkafkaDatahubTaskTransformsParamArgs
- Content string - Raw data.
- FieldChains List<CkafkaDatahubTaskTransformsParamFieldChain> - Processing chain.
- BatchAnalyse CkafkaDatahubTaskTransformsParamBatchAnalyse - Data processing.
- FailureParam CkafkaDatahubTaskTransformsParamFailureParam - Failure handling.
- FilterParams List<CkafkaDatahubTaskTransformsParamFilterParam> - Filter.
- KeepMetadata bool - Whether to keep the source topic metadata (source topic, partition, offset); defaults to false.
- OutputFormat string - Output format: JSON or ROW; defaults to JSON.
- Result string - Result.
- RowParam CkafkaDatahubTaskTransformsParamRowParam - Required when the output format is ROW.
- SourceType string - Data source.
- Content string - Raw data.
- FieldChains []CkafkaDatahubTaskTransformsParamFieldChain - Processing chain.
- BatchAnalyse CkafkaDatahubTaskTransformsParamBatchAnalyse - Data processing.
- FailureParam CkafkaDatahubTaskTransformsParamFailureParam - Failure handling.
- FilterParams []CkafkaDatahubTaskTransformsParamFilterParam - Filter.
- KeepMetadata bool - Whether to keep the source topic metadata (source topic, partition, offset); defaults to false.
- OutputFormat string - Output format: JSON or ROW; defaults to JSON.
- Result string - Result.
- RowParam CkafkaDatahubTaskTransformsParamRowParam - Required when the output format is ROW.
- SourceType string - Data source.
- content String - Raw data.
- fieldChains List<CkafkaDatahubTaskTransformsParamFieldChain> - Processing chain.
- batchAnalyse CkafkaDatahubTaskTransformsParamBatchAnalyse - Data processing.
- failureParam CkafkaDatahubTaskTransformsParamFailureParam - Failure handling.
- filterParams List<CkafkaDatahubTaskTransformsParamFilterParam> - Filter.
- keepMetadata Boolean - Whether to keep the source topic metadata (source topic, partition, offset); defaults to false.
- outputFormat String - Output format: JSON or ROW; defaults to JSON.
- result String - Result.
- rowParam CkafkaDatahubTaskTransformsParamRowParam - Required when the output format is ROW.
- sourceType String - Data source.
- content string - Raw data.
- fieldChains CkafkaDatahubTaskTransformsParamFieldChain[] - Processing chain.
- batchAnalyse CkafkaDatahubTaskTransformsParamBatchAnalyse - Data processing.
- failureParam CkafkaDatahubTaskTransformsParamFailureParam - Failure handling.
- filterParams CkafkaDatahubTaskTransformsParamFilterParam[] - Filter.
- keepMetadata boolean - Whether to keep the source topic metadata (source topic, partition, offset); defaults to false.
- outputFormat string - Output format: JSON or ROW; defaults to JSON.
- result string - Result.
- rowParam CkafkaDatahubTaskTransformsParamRowParam - Required when the output format is ROW.
- sourceType string - Data source.
- content str - Raw data.
- field_chains Sequence[CkafkaDatahubTaskTransformsParamFieldChain] - Processing chain.
- batch_analyse CkafkaDatahubTaskTransformsParamBatchAnalyse - Data processing.
- failure_param CkafkaDatahubTaskTransformsParamFailureParam - Failure handling.
- filter_params Sequence[CkafkaDatahubTaskTransformsParamFilterParam] - Filter.
- keep_metadata bool - Whether to keep the source topic metadata (source topic, partition, offset); defaults to false.
- output_format str - Output format: JSON or ROW; defaults to JSON.
- result str - Result.
- row_param CkafkaDatahubTaskTransformsParamRowParam - Required when the output format is ROW.
- source_type str - Data source.
- content String - Raw data.
- fieldChains List<Property Map> - Processing chain.
- batchAnalyse Property Map - Data processing.
- failureParam Property Map - Failure handling.
- filterParams List<Property Map> - Filter.
- keepMetadata Boolean - Whether to keep the source topic metadata (source topic, partition, offset); defaults to false.
- outputFormat String - Output format: JSON or ROW; defaults to JSON.
- result String - Result.
- rowParam Property Map - Required when the output format is ROW.
- sourceType String - Data source.
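A minimal sketch of how these fields compose in TypeScript, assuming JSON in and JSON out; all values are illustrative, and it reuses the filter sketched in the FilterParam section above:

```typescript
// Illustrative transforms block: sample raw data, JSON output, no metadata kept.
const transformsParam = {
    content: JSON.stringify({ level: "ERROR", msg: "disk full" }), // sample raw data
    outputFormat: "JSON",            // JSON or ROW; ROW additionally requires rowParam
    keepMetadata: false,             // drop source topic/partition/offset metadata
    filterParams: [errorOnlyFilter], // filter sketched in the FilterParam section
    fieldChains: [],                 // processing chain; see the field chain types below
};
```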
CkafkaDatahubTaskTransformsParamBatchAnalyse, CkafkaDatahubTaskTransformsParamBatchAnalyseArgs
- Format string - ONE BY ONE for single output, MERGE for combined output.
- Format string - ONE BY ONE for single output, MERGE for combined output.
- format String - ONE BY ONE for single output, MERGE for combined output.
- format string - ONE BY ONE for single output, MERGE for combined output.
- format str - ONE BY ONE for single output, MERGE for combined output.
- format String - ONE BY ONE for single output, MERGE for combined output.
CkafkaDatahubTaskTransformsParamFailureParam, CkafkaDatahubTaskTransformsParamFailureParamArgs
- Type string - Type: DLQ (dead letter queue), IGNORE_ERROR, or DROP.
- DlqType string - DLQ type: CKAFKA or TOPIC.
- KafkaParam CkafkaDatahubTaskTransformsParamFailureParamKafkaParam - CKafka-type DLQ.
- MaxRetryAttempts double - Number of retries.
- RetryInterval double - Retry interval.
- TopicParam CkafkaDatahubTaskTransformsParamFailureParamTopicParam - DIP topic-type dead letter queue.
- Type string - Type: DLQ (dead letter queue), IGNORE_ERROR, or DROP.
- DlqType string - DLQ type: CKAFKA or TOPIC.
- KafkaParam CkafkaDatahubTaskTransformsParamFailureParamKafkaParam - CKafka-type DLQ.
- MaxRetryAttempts float64 - Number of retries.
- RetryInterval float64 - Retry interval.
- TopicParam CkafkaDatahubTaskTransformsParamFailureParamTopicParam - DIP topic-type dead letter queue.
- type String - Type: DLQ (dead letter queue), IGNORE_ERROR, or DROP.
- dlqType String - DLQ type: CKAFKA or TOPIC.
- kafkaParam CkafkaDatahubTaskTransformsParamFailureParamKafkaParam - CKafka-type DLQ.
- maxRetryAttempts Double - Number of retries.
- retryInterval Double - Retry interval.
- topicParam CkafkaDatahubTaskTransformsParamFailureParamTopicParam - DIP topic-type dead letter queue.
- type string - Type: DLQ (dead letter queue), IGNORE_ERROR, or DROP.
- dlqType string - DLQ type: CKAFKA or TOPIC.
- kafkaParam CkafkaDatahubTaskTransformsParamFailureParamKafkaParam - CKafka-type DLQ.
- maxRetryAttempts number - Number of retries.
- retryInterval number - Retry interval.
- topicParam CkafkaDatahubTaskTransformsParamFailureParamTopicParam - DIP topic-type dead letter queue.
- type str - Type: DLQ (dead letter queue), IGNORE_ERROR, or DROP.
- dlq_type str - DLQ type: CKAFKA or TOPIC.
- kafka_param CkafkaDatahubTaskTransformsParamFailureParamKafkaParam - CKafka-type DLQ.
- max_retry_attempts float - Number of retries.
- retry_interval float - Retry interval.
- topic_param CkafkaDatahubTaskTransformsParamFailureParamTopicParam - DIP topic-type dead letter queue.
- type String - Type: DLQ (dead letter queue), IGNORE_ERROR, or DROP.
- dlqType String - DLQ type: CKAFKA or TOPIC.
- kafkaParam Property Map - CKafka-type DLQ.
- maxRetryAttempts Number - Number of retries.
- retryInterval Number - Retry interval.
- topicParam Property Map - DIP topic-type dead letter queue.
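For orientation, a hedged sketch of a failure-handling block that retries and then dead-letters to a CKafka instance; the resource IDs are placeholders, and the unit of the retry interval is not stated above:

```typescript
// Illustrative DLQ configuration: retry three times, then dead-letter to CKafka.
const failureParam = {
    type: "DLQ",         // DLQ | IGNORE_ERROR | DROP
    dlqType: "CKAFKA",   // CKAFKA | TOPIC
    maxRetryAttempts: 3, // number of retries before dead-lettering
    retryInterval: 1000, // retry interval (unit not documented; assumed milliseconds)
    kafkaParam: {        // CKafka-type DLQ target; see the KafkaParam type below
        resource: "ckafka-xxxxxxxx", // placeholder instance ID
        topic: "my-dlq-topic",       // placeholder topic name
        offsetType: "earliest",
    },
};
```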
CkafkaDatahubTaskTransformsParamFailureParamKafkaParam, CkafkaDatahubTaskTransformsParamFailureParamKafkaParamArgs
- Resource string - Instance resource.
- SelfBuilt bool - Whether the cluster is self-built rather than a cloud product instance.
- CompressionType string - Whether to compress when writing to the topic; fill in none to disable or open to enable.
- EnableToleration bool - Whether to enable the dead letter queue.
- MsgMultiple double - Each source topic message is amplified this many times when written to the target topic (currently only applies to CKafka-to-CKafka tasks).
- OffsetType string - Offset type: earliest (from the beginning), latest (from the latest), or timestamp (from a specific time).
- PartitionNum double - Number of partitions of the topic.
- QpsLimit double - QPS (queries per second) limit.
- ResourceName string - Instance name.
- StartTime double - Required when the offset type is timestamp.
- TableMappings List<CkafkaDatahubTaskTransformsParamFailureParamKafkaParamTableMapping> - Table-to-topic mappings; required when multi-topic is selected.
- Topic string - Topic name.
- TopicId string - Topic ID.
- UseAutoCreateTopic bool - Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, fill in the name of the topic to be auto-created in the Topic field).
- UseTableMapping bool - Whether to use multi-table mapping.
- ZoneId double - Zone ID.
- Resource string - Instance resource.
- SelfBuilt bool - Whether the cluster is self-built rather than a cloud product instance.
- CompressionType string - Whether to compress when writing to the topic; fill in none to disable or open to enable.
- EnableToleration bool - Whether to enable the dead letter queue.
- MsgMultiple float64 - Each source topic message is amplified this many times when written to the target topic (currently only applies to CKafka-to-CKafka tasks).
- OffsetType string - Offset type: earliest (from the beginning), latest (from the latest), or timestamp (from a specific time).
- PartitionNum float64 - Number of partitions of the topic.
- QpsLimit float64 - QPS (queries per second) limit.
- ResourceName string - Instance name.
- StartTime float64 - Required when the offset type is timestamp.
- TableMappings []CkafkaDatahubTaskTransformsParamFailureParamKafkaParamTableMapping - Table-to-topic mappings; required when multi-topic is selected.
- Topic string - Topic name.
- TopicId string - Topic ID.
- UseAutoCreateTopic bool - Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, fill in the name of the topic to be auto-created in the Topic field).
- UseTableMapping bool - Whether to use multi-table mapping.
- ZoneId float64 - Zone ID.
- resource String - Instance resource.
- selfBuilt Boolean - Whether the cluster is self-built rather than a cloud product instance.
- compressionType String - Whether to compress when writing to the topic; fill in none to disable or open to enable.
- enableToleration Boolean - Whether to enable the dead letter queue.
- msgMultiple Double - Each source topic message is amplified this many times when written to the target topic (currently only applies to CKafka-to-CKafka tasks).
- offsetType String - Offset type: earliest (from the beginning), latest (from the latest), or timestamp (from a specific time).
- partitionNum Double - Number of partitions of the topic.
- qpsLimit Double - QPS (queries per second) limit.
- resourceName String - Instance name.
- startTime Double - Required when the offset type is timestamp.
- tableMappings List<CkafkaDatahubTaskTransformsParamFailureParamKafkaParamTableMapping> - Table-to-topic mappings; required when multi-topic is selected.
- topic String - Topic name.
- topicId String - Topic ID.
- useAutoCreateTopic Boolean - Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, fill in the name of the topic to be auto-created in the Topic field).
- useTableMapping Boolean - Whether to use multi-table mapping.
- zoneId Double - Zone ID.
- resource string - Instance resource.
- selfBuilt boolean - Whether the cluster is self-built rather than a cloud product instance.
- compressionType string - Whether to compress when writing to the topic; fill in none to disable or open to enable.
- enableToleration boolean - Whether to enable the dead letter queue.
- msgMultiple number - Each source topic message is amplified this many times when written to the target topic (currently only applies to CKafka-to-CKafka tasks).
- offsetType string - Offset type: earliest (from the beginning), latest (from the latest), or timestamp (from a specific time).
- partitionNum number - Number of partitions of the topic.
- qpsLimit number - QPS (queries per second) limit.
- resourceName string - Instance name.
- startTime number - Required when the offset type is timestamp.
- tableMappings CkafkaDatahubTaskTransformsParamFailureParamKafkaParamTableMapping[] - Table-to-topic mappings; required when multi-topic is selected.
- topic string - Topic name.
- topicId string - Topic ID.
- useAutoCreateTopic boolean - Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, fill in the name of the topic to be auto-created in the Topic field).
- useTableMapping boolean - Whether to use multi-table mapping.
- zoneId number - Zone ID.
- resource str - Instance resource.
- self_built bool - Whether the cluster is self-built rather than a cloud product instance.
- compression_type str - Whether to compress when writing to the topic; fill in none to disable or open to enable.
- enable_toleration bool - Whether to enable the dead letter queue.
- msg_multiple float - Each source topic message is amplified this many times when written to the target topic (currently only applies to CKafka-to-CKafka tasks).
- offset_type str - Offset type: earliest (from the beginning), latest (from the latest), or timestamp (from a specific time).
- partition_num float - Number of partitions of the topic.
- qps_limit float - QPS (queries per second) limit.
- resource_name str - Instance name.
- start_time float - Required when the offset type is timestamp.
- table_mappings Sequence[CkafkaDatahubTaskTransformsParamFailureParamKafkaParamTableMapping] - Table-to-topic mappings; required when multi-topic is selected.
- topic str - Topic name.
- topic_id str - Topic ID.
- use_auto_create_topic bool - Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, fill in the name of the topic to be auto-created in the Topic field).
- use_table_mapping bool - Whether to use multi-table mapping.
- zone_id float - Zone ID.
- resource String - Instance resource.
- selfBuilt Boolean - Whether the cluster is self-built rather than a cloud product instance.
- compressionType String - Whether to compress when writing to the topic; fill in none to disable or open to enable.
- enableToleration Boolean - Whether to enable the dead letter queue.
- msgMultiple Number - Each source topic message is amplified this many times when written to the target topic (currently only applies to CKafka-to-CKafka tasks).
- offsetType String - Offset type: earliest (from the beginning), latest (from the latest), or timestamp (from a specific time).
- partitionNum Number - Number of partitions of the topic.
- qpsLimit Number - QPS (queries per second) limit.
- resourceName String - Instance name.
- startTime Number - Required when the offset type is timestamp.
- tableMappings List<Property Map> - Table-to-topic mappings; required when multi-topic is selected.
- topic String - Topic name.
- topicId String - Topic ID.
- useAutoCreateTopic Boolean - Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, fill in the name of the topic to be auto-created in the Topic field).
- useTableMapping Boolean - Whether to use multi-table mapping.
- zoneId Number - Zone ID.
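The timestamp offset mode is the one field pairing worth calling out: an offset type of timestamp only makes sense together with startTime. A hedged sketch, with placeholder IDs:

```typescript
// Illustrative kafkaParam that starts from a point in time rather than an offset.
const kafkaParam = {
    resource: "ckafka-xxxxxxxx", // placeholder instance resource ID
    topic: "source-topic",       // placeholder topic name
    offsetType: "timestamp",     // earliest | latest | timestamp
    startTime: 1700000000,       // Unix seconds; required when offsetType is "timestamp"
    selfBuilt: false,            // cloud product instance, not self-built
    useAutoCreateTopic: false,
};
```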
CkafkaDatahubTaskTransformsParamFailureParamKafkaParamTableMapping, CkafkaDatahubTaskTransformsParamFailureParamKafkaParamTableMappingArgs
CkafkaDatahubTaskTransformsParamFailureParamTopicParam, CkafkaDatahubTaskTransformsParamFailureParamTopicParamArgs
- Resource string - The name of the standalone (separately sold) topic.
- CompressionType string - Whether to compress when writing to the topic; fill in none to disable, or one of gzip, snappy, or lz4 to enable.
- MsgMultiple double - Each source topic message is amplified this many times when written to the target topic (currently only applies to CKafka-to-CKafka tasks).
- OffsetType string - Offset type: earliest (initial position), latest (latest position), or timestamp (a point in time).
- StartTime double - Required when the offset type is timestamp; pass a Unix timestamp accurate to the second.
- TopicId string - Topic ID.
- UseAutoCreateTopic bool - Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
- Resource string - The name of the standalone (separately sold) topic.
- CompressionType string - Whether to compress when writing to the topic; fill in none to disable, or one of gzip, snappy, or lz4 to enable.
- MsgMultiple float64 - Each source topic message is amplified this many times when written to the target topic (currently only applies to CKafka-to-CKafka tasks).
- OffsetType string - Offset type: earliest (initial position), latest (latest position), or timestamp (a point in time).
- StartTime float64 - Required when the offset type is timestamp; pass a Unix timestamp accurate to the second.
- TopicId string - Topic ID.
- UseAutoCreateTopic bool - Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
- resource String - The name of the standalone (separately sold) topic.
- compressionType String - Whether to compress when writing to the topic; fill in none to disable, or one of gzip, snappy, or lz4 to enable.
- msgMultiple Double - Each source topic message is amplified this many times when written to the target topic (currently only applies to CKafka-to-CKafka tasks).
- offsetType String - Offset type: earliest (initial position), latest (latest position), or timestamp (a point in time).
- startTime Double - Required when the offset type is timestamp; pass a Unix timestamp accurate to the second.
- topicId String - Topic ID.
- useAutoCreateTopic Boolean - Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
- resource string - The name of the standalone (separately sold) topic.
- compressionType string - Whether to compress when writing to the topic; fill in none to disable, or one of gzip, snappy, or lz4 to enable.
- msgMultiple number - Each source topic message is amplified this many times when written to the target topic (currently only applies to CKafka-to-CKafka tasks).
- offsetType string - Offset type: earliest (initial position), latest (latest position), or timestamp (a point in time).
- startTime number - Required when the offset type is timestamp; pass a Unix timestamp accurate to the second.
- topicId string - Topic ID.
- useAutoCreateTopic boolean - Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
- resource str - The name of the standalone (separately sold) topic.
- compression_type str - Whether to compress when writing to the topic; fill in none to disable, or one of gzip, snappy, or lz4 to enable.
- msg_multiple float - Each source topic message is amplified this many times when written to the target topic (currently only applies to CKafka-to-CKafka tasks).
- offset_type str - Offset type: earliest (initial position), latest (latest position), or timestamp (a point in time).
- start_time float - Required when the offset type is timestamp; pass a Unix timestamp accurate to the second.
- topic_id str - Topic ID.
- use_auto_create_topic bool - Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
- resource String - The name of the standalone (separately sold) topic.
- compressionType String - Whether to compress when writing to the topic; fill in none to disable, or one of gzip, snappy, or lz4 to enable.
- msgMultiple Number - Each source topic message is amplified this many times when written to the target topic (currently only applies to CKafka-to-CKafka tasks).
- offsetType String - Offset type: earliest (initial position), latest (latest position), or timestamp (a point in time).
- startTime Number - Required when the offset type is timestamp; pass a Unix timestamp accurate to the second.
- topicId String - Topic ID.
- useAutoCreateTopic Boolean - Whether the topic should be created automatically (currently only supported for SOURCE inflow tasks).
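A hedged sketch of a topic-type dead letter queue target, picking lz4 from the documented compression options; the topic name is a placeholder:

```typescript
// Illustrative DIP topic DLQ target with lz4 compression.
const topicParam = {
    resource: "my-standalone-topic", // placeholder standalone topic name
    compressionType: "lz4",          // none | gzip | snappy | lz4
    offsetType: "latest",            // earliest | latest | timestamp
    useAutoCreateTopic: false,
};
```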
CkafkaDatahubTaskTransformsParamFieldChain, CkafkaDatahubTaskTransformsParamFieldChainArgs
- Analyse CkafkaDatahubTaskTransformsParamFieldChainAnalyse - Analysis.
- AnalyseJsonResult string - Parsing result in JSON format.
- AnalyseResults List<CkafkaDatahubTaskTransformsParamFieldChainAnalyseResult> - Analysis results.
- Result string - Test results.
- SMTs List<CkafkaDatahubTaskTransformsParamFieldChainSMT> - Data processing (SMTs).
- SecondaryAnalyse CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyse - Secondary analysis.
- SecondaryAnalyseJsonResult string - Secondary parsing result in JSON format.
- SecondaryAnalyseResults List<CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResult> - Secondary analysis results.
- Analyse CkafkaDatahubTaskTransformsParamFieldChainAnalyse - Analysis.
- AnalyseJsonResult string - Parsing result in JSON format.
- AnalyseResults []CkafkaDatahubTaskTransformsParamFieldChainAnalyseResult - Analysis results.
- Result string - Test results.
- SMTs []CkafkaDatahubTaskTransformsParamFieldChainSMT - Data processing (SMTs).
- SecondaryAnalyse CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyse - Secondary analysis.
- SecondaryAnalyseJsonResult string - Secondary parsing result in JSON format.
- SecondaryAnalyseResults []CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResult - Secondary analysis results.
- analyse CkafkaDatahubTaskTransformsParamFieldChainAnalyse - Analysis.
- analyseJsonResult String - Parsing result in JSON format.
- analyseResults List<CkafkaDatahubTaskTransformsParamFieldChainAnalyseResult> - Analysis results.
- result String - Test results.
- sMTs List<CkafkaDatahubTaskTransformsParamFieldChainSMT> - Data processing (SMTs).
- secondaryAnalyse CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyse - Secondary analysis.
- secondaryAnalyseJsonResult String - Secondary parsing result in JSON format.
- secondaryAnalyseResults List<CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResult> - Secondary analysis results.
- analyse CkafkaDatahubTaskTransformsParamFieldChainAnalyse - Analysis.
- analyseJsonResult string - Parsing result in JSON format.
- analyseResults CkafkaDatahubTaskTransformsParamFieldChainAnalyseResult[] - Analysis results.
- result string - Test results.
- sMTs CkafkaDatahubTaskTransformsParamFieldChainSMT[] - Data processing (SMTs).
- secondaryAnalyse CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyse - Secondary analysis.
- secondaryAnalyseJsonResult string - Secondary parsing result in JSON format.
- secondaryAnalyseResults CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResult[] - Secondary analysis results.
- analyse CkafkaDatahubTaskTransformsParamFieldChainAnalyse - Analysis.
- analyse_json_result str - Parsing result in JSON format.
- analyse_results Sequence[CkafkaDatahubTaskTransformsParamFieldChainAnalyseResult] - Analysis results.
- result str - Test results.
- s_mts Sequence[CkafkaDatahubTaskTransformsParamFieldChainSMT] - Data processing (SMTs).
- secondary_analyse CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyse - Secondary analysis.
- secondary_analyse_json_result str - Secondary parsing result in JSON format.
- secondary_analyse_results Sequence[CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResult] - Secondary analysis results.
- analyse Property Map - Analysis.
- analyseJsonResult String - Parsing result in JSON format.
- analyseResults List<Property Map> - Analysis results.
- result String - Test results.
- sMTs List<Property Map> - Data processing (SMTs).
- secondaryAnalyse Property Map - Secondary analysis.
- secondaryAnalyseJsonResult String - Secondary parsing result in JSON format.
- secondaryAnalyseResults List<Property Map> - Secondary analysis results.
CkafkaDatahubTaskTransformsParamFieldChainAnalyse, CkafkaDatahubTaskTransformsParamFieldChainAnalyseArgs
- Format string - Parsing format: JSON, DELIMITER (delimiter), REGULAR (regular-expression extraction), or SOURCE (process all results of the upper layer).
- InputValue string - KEY expression of the key to be processed again.
- InputValueType string - Mode of the key to be processed again.
- Regex string - Delimiter or regular expression.
- Format string - Parsing format: JSON, DELIMITER (delimiter), REGULAR (regular-expression extraction), or SOURCE (process all results of the upper layer).
- InputValue string - KEY expression of the key to be processed again.
- InputValueType string - Mode of the key to be processed again.
- Regex string - Delimiter or regular expression.
- format String - Parsing format: JSON, DELIMITER (delimiter), REGULAR (regular-expression extraction), or SOURCE (process all results of the upper layer).
- inputValue String - KEY expression of the key to be processed again.
- inputValueType String - Mode of the key to be processed again.
- regex String - Delimiter or regular expression.
- format string - Parsing format: JSON, DELIMITER (delimiter), REGULAR (regular-expression extraction), or SOURCE (process all results of the upper layer).
- inputValue string - KEY expression of the key to be processed again.
- inputValueType string - Mode of the key to be processed again.
- regex string - Delimiter or regular expression.
- format str - Parsing format: JSON, DELIMITER (delimiter), REGULAR (regular-expression extraction), or SOURCE (process all results of the upper layer).
- input_value str - KEY expression of the key to be processed again.
- input_value_type str - Mode of the key to be processed again.
- regex str - Delimiter or regular expression.
- format String - Parsing format: JSON, DELIMITER (delimiter), REGULAR (regular-expression extraction), or SOURCE (process all results of the upper layer).
- inputValue String - KEY expression of the key to be processed again.
- inputValueType String - Mode of the key to be processed again.
- regex String - Delimiter or regular expression.
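Putting the two previous types together: a field chain starts with an analyse step, and in DELIMITER format the regex field carries the delimiter. A minimal sketch with a made-up delimiter:

```typescript
// Illustrative field chain: split each record on "|" before further processing.
const fieldChain = {
    analyse: {
        format: "DELIMITER", // JSON | DELIMITER | REGULAR | SOURCE
        regex: "|",          // delimiter when format is DELIMITER
    },
};
```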
CkafkaDatahubTaskTransformsParamFieldChainAnalyseResult, CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultArgs
- Key string - Key.
- Operate string - Operation: DATE (system preset timestamp), CUSTOMIZE (customization), MAPPING (mapping), JSONPATH.
- SchemeType string - Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, or ARRAY.
- OriginalValue string - Original value.
- Value string - Value.
- ValueOperate CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperate - VALUE processing.
- ValueOperates List<CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperate> - VALUE processing chain.
- Key string - Key.
- Operate string - Operation: DATE (system preset timestamp), CUSTOMIZE (customization), MAPPING (mapping), JSONPATH.
- SchemeType string - Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, or ARRAY.
- OriginalValue string - Original value.
- Value string - Value.
- ValueOperate CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperate - VALUE processing.
- ValueOperates []CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperate - VALUE processing chain.
- key String - Key.
- operate String - Operation: DATE (system preset timestamp), CUSTOMIZE (customization), MAPPING (mapping), JSONPATH.
- schemeType String - Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, or ARRAY.
- originalValue String - Original value.
- value String - Value.
- valueOperate CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperate - VALUE processing.
- valueOperates List<CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperate> - VALUE processing chain.
- key string - Key.
- operate string - Operation: DATE (system preset timestamp), CUSTOMIZE (customization), MAPPING (mapping), JSONPATH.
- schemeType string - Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, or ARRAY.
- originalValue string - Original value.
- value string - Value.
- valueOperate CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperate - VALUE processing.
- valueOperates CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperate[] - VALUE processing chain.
- key str - Key.
- operate str - Operation: DATE (system preset timestamp), CUSTOMIZE (customization), MAPPING (mapping), JSONPATH.
- scheme_type str - Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, or ARRAY.
- original_value str - Original value.
- value str - Value.
- value_operate CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperate - VALUE processing.
- value_operates Sequence[CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperate] - VALUE processing chain.
- key String - Key.
- operate String - Operation: DATE (system preset timestamp), CUSTOMIZE (customization), MAPPING (mapping), JSONPATH.
- schemeType String - Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, or ARRAY.
- originalValue String - Original value.
- value String - Value.
- valueOperate Property Map - VALUE processing.
- valueOperates List<Property Map> - VALUE processing chain.
CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperate, CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateArgs
- Type string - Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (strip leading and trailing spaces), REGEX REPLACE (regular-expression replacement), URL DECODE, or LOWERCASE (convert to lowercase).
- Date CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateDate - Time conversion; required when TYPE=DATE.
- JsonPathReplace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateJsonPathReplace - JSON Path replacement; required when TYPE=JSON PATH REPLACE.
- KV CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateKV - Key-value secondary parsing; required when TYPE=KV.
- RegexReplace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateRegexReplace - Regular-expression replacement; required when TYPE=REGEX REPLACE.
- Replace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateReplace - Replacement; required when TYPE=REPLACE.
- Result string - Result.
- Split CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSplit - Split one value into multiple values; required when TYPE=SPLIT.
- Substr CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSubstr - Substring extraction; required when TYPE=SUBSTR.
- UrlDecode CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateUrlDecode - URL parsing.
- Type string - Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (strip leading and trailing spaces), REGEX REPLACE (regular-expression replacement), URL DECODE, or LOWERCASE (convert to lowercase).
- Date CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateDate - Time conversion; required when TYPE=DATE.
- JsonPathReplace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateJsonPathReplace - JSON Path replacement; required when TYPE=JSON PATH REPLACE.
- KV CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateKV - Key-value secondary parsing; required when TYPE=KV.
- RegexReplace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateRegexReplace - Regular-expression replacement; required when TYPE=REGEX REPLACE.
- Replace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateReplace - Replacement; required when TYPE=REPLACE.
- Result string - Result.
- Split CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSplit - Split one value into multiple values; required when TYPE=SPLIT.
- Substr CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSubstr - Substring extraction; required when TYPE=SUBSTR.
- UrlDecode CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateUrlDecode - URL parsing.
- type String - Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (strip leading and trailing spaces), REGEX REPLACE (regular-expression replacement), URL DECODE, or LOWERCASE (convert to lowercase).
- date CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateDate - Time conversion; required when TYPE=DATE.
- jsonPathReplace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateJsonPathReplace - JSON Path replacement; required when TYPE=JSON PATH REPLACE.
- kV CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateKV - Key-value secondary parsing; required when TYPE=KV.
- regexReplace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateRegexReplace - Regular-expression replacement; required when TYPE=REGEX REPLACE.
- replace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateReplace - Replacement; required when TYPE=REPLACE.
- result String - Result.
- split CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSplit - Split one value into multiple values; required when TYPE=SPLIT.
- substr CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSubstr - Substring extraction; required when TYPE=SUBSTR.
- urlDecode CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateUrlDecode - URL parsing.
- type string - Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (strip leading and trailing spaces), REGEX REPLACE (regular-expression replacement), URL DECODE, or LOWERCASE (convert to lowercase).
- date CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateDate - Time conversion; required when TYPE=DATE.
- jsonPathReplace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateJsonPathReplace - JSON Path replacement; required when TYPE=JSON PATH REPLACE.
- kV CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateKV - Key-value secondary parsing; required when TYPE=KV.
- regexReplace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateRegexReplace - Regular-expression replacement; required when TYPE=REGEX REPLACE.
- replace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateReplace - Replacement; required when TYPE=REPLACE.
- result string - Result.
- split CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSplit - Split one value into multiple values; required when TYPE=SPLIT.
- substr CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSubstr - Substring extraction; required when TYPE=SUBSTR.
- urlDecode CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateUrlDecode - URL parsing.
- type str - Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (strip leading and trailing spaces), REGEX REPLACE (regular-expression replacement), URL DECODE, or LOWERCASE (convert to lowercase).
- date CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateDate - Time conversion; required when TYPE=DATE.
- json_path_replace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateJsonPathReplace - JSON Path replacement; required when TYPE=JSON PATH REPLACE.
- k_v CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateKV - Key-value secondary parsing; required when TYPE=KV.
- regex_replace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateRegexReplace - Regular-expression replacement; required when TYPE=REGEX REPLACE.
- replace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateReplace - Replacement; required when TYPE=REPLACE.
- result str - Result.
- split CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSplit - Split one value into multiple values; required when TYPE=SPLIT.
- substr CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSubstr - Substring extraction; required when TYPE=SUBSTR.
- url_decode CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateUrlDecode - URL parsing.
- type String - Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (strip leading and trailing spaces), REGEX REPLACE (regular-expression replacement), URL DECODE, or LOWERCASE (convert to lowercase).
- date Property Map - Time conversion; required when TYPE=DATE.
- jsonPathReplace Property Map - JSON Path replacement; required when TYPE=JSON PATH REPLACE.
- kV Property Map - Key-value secondary parsing; required when TYPE=KV.
- regexReplace Property Map - Regular-expression replacement; required when TYPE=REGEX REPLACE.
- replace Property Map - Replacement; required when TYPE=REPLACE.
- result String - Result.
- split Property Map - Split one value into multiple values; required when TYPE=SPLIT.
- substr Property Map - Substring extraction; required when TYPE=SUBSTR.
- urlDecode Property Map - URL parsing.
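Of the processing modes above, DATE is one that needs its companion object. A hedged sketch (the time-format string is an assumption, not taken from the provider docs):

```typescript
// Illustrative VALUE processing step: convert a date string to a Unix timestamp.
const valueOperate = {
    type: "DATE",
    date: {
        format: "yyyy-MM-dd HH:mm:ss", // assumed time-format syntax
        targetType: "unix",            // string | unix
        timeZone: "GMT+8",             // defaults to GMT+8
    },
};
```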
CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateDate, CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateDateArgs
- Format string - Time format.
- TargetType string - Input type: string or unix.
- TimeZone string - Time zone; defaults to GMT+8.
- Format string - Time format.
- TargetType string - Input type: string or unix.
- TimeZone string - Time zone; defaults to GMT+8.
- format String - Time format.
- targetType String - Input type: string or unix.
- timeZone String - Time zone; defaults to GMT+8.
- format string - Time format.
- targetType string - Input type: string or unix.
- timeZone string - Time zone; defaults to GMT+8.
- format str - Time format.
- target_type str - Input type: string or unix.
- time_zone str - Time zone; defaults to GMT+8.
- format String - Time format.
- targetType String - Input type: string or unix.
- timeZone String - Time zone; defaults to GMT+8.
CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateJsonPathReplace, CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateJsonPathReplaceArgs
CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateKV, CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateKVArgs
- Delimiter string - Delimiter.
- Regex string - Key-value secondary parsing delimiter.
- KeepOriginalKey string - Whether to keep the source key; defaults to false (do not keep).
- Delimiter string - Delimiter.
- Regex string - Key-value secondary parsing delimiter.
- KeepOriginalKey string - Whether to keep the source key; defaults to false (do not keep).
- delimiter String - Delimiter.
- regex String - Key-value secondary parsing delimiter.
- keepOriginalKey String - Whether to keep the source key; defaults to false (do not keep).
- delimiter string - Delimiter.
- regex string - Key-value secondary parsing delimiter.
- keepOriginalKey string - Whether to keep the source key; defaults to false (do not keep).
- delimiter str - Delimiter.
- regex str - Key-value secondary parsing delimiter.
- keep_original_key str - Whether to keep the source key; defaults to false (do not keep).
- delimiter String - Delimiter.
- regex String - Key-value secondary parsing delimiter.
- keepOriginalKey String - Whether to keep the source key; defaults to false (do not keep).
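A hedged sketch of a TYPE=KV step. The schema above does not spell out which delimiter splits pairs and which splits key from value, so the assignment below is an assumption, for data shaped like a=1&b=2:

```typescript
// Illustrative key-value secondary parsing for input like "a=1&b=2".
const kvOperate = {
    type: "KV",
    kV: {
        delimiter: "=",           // assumed: separates key from value within a pair
        regex: "&",               // assumed: separates pairs
        keepOriginalKey: "false", // string-typed per the schema above
    },
};
```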
CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateRegexReplace, CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateRegexReplaceArgs
CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateReplace, CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateReplaceArgs
CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSplit, CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSplitArgs
- Regex string - Delimiter.
- Regex string - Delimiter.
- regex String - Delimiter.
- regex string - Delimiter.
- regex str - Delimiter.
- regex String - Delimiter.
CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSubstr, CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSubstrArgs
CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateUrlDecode, CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateUrlDecodeArgs
- CharsetName string - Encoding.
- CharsetName string - Encoding.
- charsetName String - Encoding.
- charsetName string - Encoding.
- charset_name str - Encoding.
- charsetName String - Encoding.
CkafkaDatahubTaskTransformsParamFieldChainSMT, CkafkaDatahubTaskTransformsParamFieldChainSMTArgs
- Key string - KEY.
- Operate string - Operation: DATE (system preset timestamp), CUSTOMIZE (customization), MAPPING (mapping), JSONPATH.
- SchemeType string - Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, or ARRAY.
- OriginalValue string - Original value.
- Value string - VALUE.
- ValueOperate CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperate - VALUE processing.
- ValueOperates List<CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperate> - VALUE processing chain.
- Key string - KEY.
- Operate string - Operation: DATE (system preset timestamp), CUSTOMIZE (customization), MAPPING (mapping), JSONPATH.
- SchemeType string - Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, or ARRAY.
- OriginalValue string - Original value.
- Value string - VALUE.
- ValueOperate CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperate - VALUE processing.
- ValueOperates []CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperate - VALUE processing chain.
- key String - KEY.
- operate String - Operation: DATE (system preset timestamp), CUSTOMIZE (customization), MAPPING (mapping), JSONPATH.
- schemeType String - Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, or ARRAY.
- originalValue String - Original value.
- value String - VALUE.
- valueOperate CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperate - VALUE processing.
- valueOperates List<CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperate> - VALUE processing chain.
- key string - KEY.
- operate string - Operation: DATE (system preset timestamp), CUSTOMIZE (customization), MAPPING (mapping), JSONPATH.
- schemeType string - Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, or ARRAY.
- originalValue string - Original value.
- value string - VALUE.
- valueOperate CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperate - VALUE processing.
- valueOperates CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperate[] - VALUE processing chain.
- key str - KEY.
- operate str - Operation: DATE (system preset timestamp), CUSTOMIZE (customization), MAPPING (mapping), JSONPATH.
- scheme_type str - Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, or ARRAY.
- original_value str - Original value.
- value str - VALUE.
- value_operate CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperate - VALUE processing.
- value_operates Sequence[CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperate] - VALUE processing chain.
- key String - KEY.
- operate String - Operation: DATE (system preset timestamp), CUSTOMIZE (customization), MAPPING (mapping), JSONPATH.
- schemeType String - Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, or ARRAY.
- originalValue String - Original value.
- value String - VALUE.
- valueOperate Property Map - VALUE processing.
- valueOperates List<Property Map> - VALUE processing chain.
CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperate, CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateArgs
- Type string - Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (strip leading and trailing spaces), REGEX REPLACE (regular-expression replacement), URL DECODE, or LOWERCASE (convert to lowercase).
- Date CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateDate - Time conversion; required when TYPE=DATE.
- JsonPathReplace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateJsonPathReplace - JSON Path replacement; required when TYPE=JSON PATH REPLACE.
- KV CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateKV - Key-value secondary parsing; required when TYPE=KV.
- RegexReplace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateRegexReplace - Regular-expression replacement; required when TYPE=REGEX REPLACE.
- Replace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateReplace - Replacement; required when TYPE=REPLACE.
- Result string - Result.
- Split CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSplit - Split one value into multiple values; required when TYPE=SPLIT.
- Substr CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSubstr - Substring extraction; required when TYPE=SUBSTR.
- UrlDecode CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateUrlDecode - URL parsing.
- Type string - Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (strip leading and trailing spaces), REGEX REPLACE (regular-expression replacement), URL DECODE, or LOWERCASE (convert to lowercase).
- Date CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateDate - Time conversion; required when TYPE=DATE.
- JsonPathReplace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateJsonPathReplace - JSON Path replacement; required when TYPE=JSON PATH REPLACE.
- KV CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateKV - Key-value secondary parsing; required when TYPE=KV.
- RegexReplace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateRegexReplace - Regular-expression replacement; required when TYPE=REGEX REPLACE.
- Replace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateReplace - Replacement; required when TYPE=REPLACE.
- Result string - Result.
- Split CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSplit - Split one value into multiple values; required when TYPE=SPLIT.
- Substr CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSubstr - Substring extraction; required when TYPE=SUBSTR.
- UrlDecode CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateUrlDecode - URL parsing.
- type String - Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (strip leading and trailing spaces), REGEX REPLACE (regular-expression replacement), URL DECODE, or LOWERCASE (convert to lowercase).
- date CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateDate - Time conversion; required when TYPE=DATE.
- jsonPathReplace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateJsonPathReplace - JSON Path replacement; required when TYPE=JSON PATH REPLACE.
- kV CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateKV - Key-value secondary parsing; required when TYPE=KV.
- regexReplace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateRegexReplace - Regular-expression replacement; required when TYPE=REGEX REPLACE.
- replace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateReplace - Replacement; required when TYPE=REPLACE.
- result String - Result.
- split CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSplit - Split one value into multiple values; required when TYPE=SPLIT.
- substr CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSubstr - Substring extraction; required when TYPE=SUBSTR.
- urlDecode CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateUrlDecode - URL parsing.
- type string - Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (strip leading and trailing spaces), REGEX REPLACE (regular-expression replacement), URL DECODE, or LOWERCASE (convert to lowercase).
- date CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateDate - Time conversion; required when TYPE=DATE.
- jsonPathReplace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateJsonPathReplace - JSON Path replacement; required when TYPE=JSON PATH REPLACE.
- kV CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateKV - Key-value secondary parsing; required when TYPE=KV.
- regexReplace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateRegexReplace - Regular-expression replacement; required when TYPE=REGEX REPLACE.
- replace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateReplace - Replacement; required when TYPE=REPLACE.
- result string - Result.
- split CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSplit - Split one value into multiple values; required when TYPE=SPLIT.
- substr CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSubstr - Substring extraction; required when TYPE=SUBSTR.
- urlDecode CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateUrlDecode - URL parsing.
- type str - Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (strip leading and trailing spaces), REGEX REPLACE (regular-expression replacement), URL DECODE, or LOWERCASE (convert to lowercase).
- date CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateDate - Time conversion; required when TYPE=DATE.
- json_path_replace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateJsonPathReplace - JSON Path replacement; required when TYPE=JSON PATH REPLACE.
- k_v CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateKV - Key-value secondary parsing; required when TYPE=KV.
- regex_replace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateRegexReplace - Regular-expression replacement; required when TYPE=REGEX REPLACE.
- replace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateReplace - Replacement; required when TYPE=REPLACE.
- result str - Result.
- split CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSplit - Split one value into multiple values; required when TYPE=SPLIT.
- substr CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSubstr - Substring extraction; required when TYPE=SUBSTR.
- url_decode CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateUrlDecode - URL parsing.
- type String - Processing mode: REPLACE (replacement), SUBSTR (substring extraction), DATE (date conversion), TRIM (strip leading and trailing spaces), REGEX REPLACE (regular-expression replacement), URL DECODE, or LOWERCASE (convert to lowercase).
- date Property Map - Time conversion; required when TYPE=DATE.
- jsonPathReplace Property Map - JSON Path replacement; required when TYPE=JSON PATH REPLACE.
- kV Property Map - Key-value secondary parsing; required when TYPE=KV.
- regexReplace Property Map - Regular-expression replacement; required when TYPE=REGEX REPLACE.
- replace Property Map - Replacement; required when TYPE=REPLACE.
- result String - Result.
- split Property Map - Split one value into multiple values; required when TYPE=SPLIT.
- substr Property Map - Substring extraction; required when TYPE=SUBSTR.
- urlDecode Property Map - URL parsing.
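SMT entries pair a key with a VALUE processing step; the sketch below uses the TYPE=SPLIT branch described above to break one field into several (the key and delimiter are made up):

```typescript
// Illustrative SMT: split the "tags" field on commas.
const smt = {
    key: "tags",          // field to process (illustrative)
    operate: "CUSTOMIZE", // DATE | CUSTOMIZE | MAPPING | JSONPATH
    schemeType: "STRING", // ORIGINAL | STRING | INT64 | FLOAT64 | BOOLEAN | MAP | ARRAY
    valueOperate: {
        type: "SPLIT",
        split: { regex: "," }, // delimiter to split on
    },
};
```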
CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateDate, CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateDateArgs
- Format string - Time format.
- TargetType string - Input type: string or unix.
- TimeZone string - Time zone; defaults to GMT+8.
- Format string - Time format.
- TargetType string - Input type: string or unix.
- TimeZone string - Time zone; defaults to GMT+8.
- format String - Time format.
- targetType String - Input type: string or unix.
- timeZone String - Time zone; defaults to GMT+8.
- format string - Time format.
- targetType string - Input type: string or unix.
- timeZone string - Time zone; defaults to GMT+8.
- format str - Time format.
- target_type str - Input type: string or unix.
- time_zone str - Time zone; defaults to GMT+8.
- format String - Time format.
- targetType String - Input type: string or unix.
- timeZone String - Time zone; defaults to GMT+8.
CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateJsonPathReplace, CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateJsonPathReplaceArgs
CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateKV, CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateKVArgs
- Delimiter string
- Delimiter.
- Regex string
- Key-value secondary parsing delimiter.
- KeepOriginalKey string
- Whether to keep the source Key; defaults to false (not kept).
- Delimiter string
- Delimiter.
- Regex string
- Key-value secondary parsing delimiter.
- KeepOriginalKey string
- Whether to keep the source Key; defaults to false (not kept).
- delimiter String
- Delimiter.
- regex String
- Key-value secondary parsing delimiter.
- keepOriginalKey String
- Whether to keep the source Key; defaults to false (not kept).
- delimiter string
- Delimiter.
- regex string
- Key-value secondary parsing delimiter.
- keepOriginalKey string
- Whether to keep the source Key; defaults to false (not kept).
- delimiter str
- Delimiter.
- regex str
- Key-value secondary parsing delimiter.
- keep_original_key str
- Whether to keep the source Key; defaults to false (not kept).
- delimiter String
- Delimiter.
- regex String
- Key-value secondary parsing delimiter.
- keepOriginalKey String
- Whether to keep the source Key; defaults to false (not kept).
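A minimal TypeScript sketch of a KV operate, assuming the camelCase property names from the listing above (note that keepOriginalKey is a string, not a boolean, in this schema; all values are placeholders):
const kvOperate = {
    type: "KV",
    kV: {
        delimiter: "&",           // delimiter between key-value pairs
        regex: "=",               // key-value secondary parsing delimiter
        keepOriginalKey: "false", // keep the source key; defaults to false
    },
};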
CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateRegexReplace, CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateRegexReplaceArgs
CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateReplace, CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateReplaceArgs
CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSplit, CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSplitArgs
- Regex string
- Delimiter.
- Regex string
- Delimiter.
- regex String
- Delimiter.
- regex string
- Delimiter.
- regex str
- Delimiter.
- regex String
- Delimiter.
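A minimal sketch of a SPLIT operate in TypeScript (the delimiter is a placeholder):
const splitOperate = {
    type: "SPLIT",
    split: {
        regex: ",", // delimiter used to split one value into multiple values
    },
};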
CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSubstr, CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSubstrArgs
CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateUrlDecode, CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateUrlDecodeArgs
- CharsetName string
- Character set code.
- CharsetName string
- Character set code.
- charsetName String
- Character set code.
- charsetName string
- Character set code.
- charset_name str
- Character set code.
- charsetName String
- Character set code.
CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyse, CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseArgs
- Regex string
- Delimiter.
- Regex string
- Delimiter.
- regex String
- Delimiter.
- regex string
- Delimiter.
- regex str
- Delimiter.
- regex String
- Delimiter.
CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResult, CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultArgs
- Key string
- KEY.
- Operate string
- Operation: DATE (system preset - timestamp), CUSTOMIZE (customization), MAPPING (mapping), JSONPATH.
- SchemeType string
- Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, ARRAY.
- OriginalValue string
- Original value.
- Value string
- VALUE.
- ValueOperate CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperate
- VALUE processing.
- ValueOperates List<CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperate>
- VALUE processing chain.
- Key string
- KEY.
- Operate string
- Operation: DATE (system preset - timestamp), CUSTOMIZE (customization), MAPPING (mapping), JSONPATH.
- SchemeType string
- Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, ARRAY.
- OriginalValue string
- Original value.
- Value string
- VALUE.
- ValueOperate CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperate
- VALUE processing.
- ValueOperates []CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperate
- VALUE processing chain.
- key String
- KEY.
- operate String
- Operation: DATE (system preset - timestamp), CUSTOMIZE (customization), MAPPING (mapping), JSONPATH.
- schemeType String
- Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, ARRAY.
- originalValue String
- Original value.
- value String
- VALUE.
- valueOperate CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperate
- VALUE processing.
- valueOperates List<CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperate>
- VALUE processing chain.
- key string
- KEY.
- operate string
- Operation: DATE (system preset - timestamp), CUSTOMIZE (customization), MAPPING (mapping), JSONPATH.
- schemeType string
- Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, ARRAY.
- originalValue string
- Original value.
- value string
- VALUE.
- valueOperate CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperate
- VALUE processing.
- valueOperates CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperate[]
- VALUE processing chain.
- key str
- KEY.
- operate str
- Operation: DATE (system preset - timestamp), CUSTOMIZE (customization), MAPPING (mapping), JSONPATH.
- scheme_type str
- Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, ARRAY.
- original_value str
- Original value.
- value str
- VALUE.
- value_operate CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperate
- VALUE processing.
- value_operates Sequence[CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperate]
- VALUE processing chain.
- key String
- KEY.
- operate String
- Operation: DATE (system preset - timestamp), CUSTOMIZE (customization), MAPPING (mapping), JSONPATH.
- schemeType String
- Data type: ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, ARRAY.
- originalValue String
- Original value.
- value String
- VALUE.
- valueOperate Property Map
- VALUE processing.
- valueOperates List<Property Map>
- VALUE processing chain.
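Putting these fields together, a single secondary-analysis result entry might look like this minimal TypeScript sketch (all names and values are placeholders; valueOperates reuses the value-operate type documented next):
const analyseResult = {
    key: "client_ip",     // KEY
    operate: "CUSTOMIZE", // DATE | CUSTOMIZE | MAPPING | JSONPATH
    schemeType: "STRING", // ORIGINAL | STRING | INT64 | FLOAT64 | BOOLEAN | MAP | ARRAY
    value: "0.0.0.0",     // VALUE
    valueOperates: [      // VALUE processing chain
        { type: "TRIM" },
        { type: "LOWERCASE" },
    ],
};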
CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperate, CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateArgs
- Type string
- Processing mode: REPLACE (replacement), SUBSTR (substring interception), DATE (date conversion), TRIM (remove leading and trailing spaces), REGEX REPLACE (regex replacement), URL DECODE (URL decoding), LOWERCASE (convert to lowercase).
- Date CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateDate
- Time conversion; required when TYPE=DATE.
- JsonPathReplace CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateJsonPathReplace
- JSON Path replacement; required when TYPE=JSON PATH REPLACE.
- KV CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateKV
- Key-value secondary parsing; required when TYPE=KV.
- RegexReplace CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateRegexReplace
- Regex replacement; required when TYPE=REGEX REPLACE.
- Replace CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateReplace
- Replacement; required when TYPE=REPLACE.
- Result string
- Result.
- Split CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateSplit
- The value supports splitting one value into multiple values; required when TYPE=SPLIT.
- Substr CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateSubstr
- Substring interception; required when TYPE=SUBSTR.
- UrlDecode CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateUrlDecode
- URL parsing.
- Type string
- Processing mode: REPLACE (replacement), SUBSTR (substring interception), DATE (date conversion), TRIM (remove leading and trailing spaces), REGEX REPLACE (regex replacement), URL DECODE (URL decoding), LOWERCASE (convert to lowercase).
- Date CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateDate
- Time conversion; required when TYPE=DATE.
- JsonPathReplace CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateJsonPathReplace
- JSON Path replacement; required when TYPE=JSON PATH REPLACE.
- KV CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateKV
- Key-value secondary parsing; required when TYPE=KV.
- RegexReplace CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateRegexReplace
- Regex replacement; required when TYPE=REGEX REPLACE.
- Replace CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateReplace
- Replacement; required when TYPE=REPLACE.
- Result string
- Result.
- Split CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateSplit
- The value supports splitting one value into multiple values; required when TYPE=SPLIT.
- Substr CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateSubstr
- Substring interception; required when TYPE=SUBSTR.
- UrlDecode CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateUrlDecode
- URL parsing.
- type String
- Processing mode: REPLACE (replacement), SUBSTR (substring interception), DATE (date conversion), TRIM (remove leading and trailing spaces), REGEX REPLACE (regex replacement), URL DECODE (URL decoding), LOWERCASE (convert to lowercase).
- date CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateDate
- Time conversion; required when TYPE=DATE.
- jsonPathReplace CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateJsonPathReplace
- JSON Path replacement; required when TYPE=JSON PATH REPLACE.
- kV CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateKV
- Key-value secondary parsing; required when TYPE=KV.
- regexReplace CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateRegexReplace
- Regex replacement; required when TYPE=REGEX REPLACE.
- replace CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateReplace
- Replacement; required when TYPE=REPLACE.
- result String
- Result.
- split CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateSplit
- The value supports splitting one value into multiple values; required when TYPE=SPLIT.
- substr CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateSubstr
- Substring interception; required when TYPE=SUBSTR.
- urlDecode CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateUrlDecode
- URL parsing.
- type string
- Processing mode: REPLACE (replacement), SUBSTR (substring interception), DATE (date conversion), TRIM (remove leading and trailing spaces), REGEX REPLACE (regex replacement), URL DECODE (URL decoding), LOWERCASE (convert to lowercase).
- date CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateDate
- Time conversion; required when TYPE=DATE.
- jsonPathReplace CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateJsonPathReplace
- JSON Path replacement; required when TYPE=JSON PATH REPLACE.
- kV CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateKV
- Key-value secondary parsing; required when TYPE=KV.
- regexReplace CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateRegexReplace
- Regex replacement; required when TYPE=REGEX REPLACE.
- replace CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateReplace
- Replacement; required when TYPE=REPLACE.
- result string
- Result.
- split CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateSplit
- The value supports splitting one value into multiple values; required when TYPE=SPLIT.
- substr CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateSubstr
- Substring interception; required when TYPE=SUBSTR.
- urlDecode CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateUrlDecode
- URL parsing.
- type str
- Processing mode: REPLACE (replacement), SUBSTR (substring interception), DATE (date conversion), TRIM (remove leading and trailing spaces), REGEX REPLACE (regex replacement), URL DECODE (URL decoding), LOWERCASE (convert to lowercase).
- date CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateDate
- Time conversion; required when TYPE=DATE.
- json_path_replace CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateJsonPathReplace
- JSON Path replacement; required when TYPE=JSON PATH REPLACE.
- k_v CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateKV
- Key-value secondary parsing; required when TYPE=KV.
- regex_replace CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateRegexReplace
- Regex replacement; required when TYPE=REGEX REPLACE.
- replace CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateReplace
- Replacement; required when TYPE=REPLACE.
- result str
- Result.
- split CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateSplit
- The value supports splitting one value into multiple values; required when TYPE=SPLIT.
- substr CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateSubstr
- Substring interception; required when TYPE=SUBSTR.
- url_decode CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateUrlDecode
- URL parsing.
- type String
- Processing mode: REPLACE (replacement), SUBSTR (substring interception), DATE (date conversion), TRIM (remove leading and trailing spaces), REGEX REPLACE (regex replacement), URL DECODE (URL decoding), LOWERCASE (convert to lowercase).
- date Property Map
- Time conversion; required when TYPE=DATE.
- jsonPathReplace Property Map
- JSON Path replacement; required when TYPE=JSON PATH REPLACE.
- kV Property Map
- Key-value secondary parsing; required when TYPE=KV.
- regexReplace Property Map
- Regex replacement; required when TYPE=REGEX REPLACE.
- replace Property Map
- Replacement; required when TYPE=REPLACE.
- result String
- Result.
- split Property Map
- The value supports splitting one value into multiple values; required when TYPE=SPLIT.
- substr Property Map
- Substring interception; required when TYPE=SUBSTR.
- urlDecode Property Map
- URL parsing.
CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateDate, CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateDateArgs
- Format string
- Time format.
- TargetType string
- Input type: string|unix.
- TimeZone string
- Default GMT+8.
- Format string
- Time format.
- TargetType string
- Input type: string|unix.
- TimeZone string
- Default GMT+8.
- format String
- Time format.
- targetType String
- Input type: string|unix.
- timeZone String
- Default GMT+8.
- format string
- Time format.
- targetType string
- Input type: string|unix.
- timeZone string
- Default GMT+8.
- format str
- Time format.
- target_type str
- Input type: string|unix.
- time_zone str
- Default GMT+8.
- format String
- Time format.
- targetType String
- Input type: string|unix.
- timeZone String
- Default GMT+8.
CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateJsonPathReplace, CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateJsonPathReplaceArgs
CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateKV, CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateKVArgs
- Delimiter string
- Delimiter.
- Regex string
- Key-value secondary parsing delimiter.
- KeepOriginalKey string
- Whether to keep the source Key; defaults to false (not kept).
- Delimiter string
- Delimiter.
- Regex string
- Key-value secondary parsing delimiter.
- KeepOriginalKey string
- Whether to keep the source Key; defaults to false (not kept).
- delimiter String
- Delimiter.
- regex String
- Key-value secondary parsing delimiter.
- keepOriginalKey String
- Whether to keep the source Key; defaults to false (not kept).
- delimiter string
- Delimiter.
- regex string
- Key-value secondary parsing delimiter.
- keepOriginalKey string
- Whether to keep the source Key; defaults to false (not kept).
- delimiter str
- Delimiter.
- regex str
- Key-value secondary parsing delimiter.
- keep_original_key str
- Whether to keep the source Key; defaults to false (not kept).
- delimiter String
- Delimiter.
- regex String
- Key-value secondary parsing delimiter.
- keepOriginalKey String
- Whether to keep the source Key; defaults to false (not kept).
CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateRegexReplace, CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateRegexReplaceArgs
CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateReplace, CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateReplaceArgs
CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateSplit, CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateSplitArgs
- Regex string
- Delimiter.
- Regex string
- Delimiter.
- regex String
- Delimiter.
- regex string
- Delimiter.
- regex str
- Delimiter.
- regex String
- Delimiter.
CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateSubstr, CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateSubstrArgs
CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateUrlDecode, CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateUrlDecodeArgs
- CharsetName string
- Character set code.
- CharsetName string
- Character set code.
- charsetName String
- Character set code.
- charsetName string
- Character set code.
- charset_name str
- Character set code.
- charsetName String
- Character set code.
CkafkaDatahubTaskTransformsParamFilterParam, CkafkaDatahubTaskTransformsParamFilterParamArgs
- key str
- Key.
- match_mode str
- Matching mode: PREFIX (prefix match), SUFFIX (suffix match), CONTAINS (contains match), EXCEPT (exclusion match), NUMBER (numeric match), IP (IP match).
- value str
- Value.
- type str
- REGULAR.
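A minimal TypeScript sketch of a filter rule built from these fields (all values are placeholders):
const filterParam = {
    key: "level",        // field to match on
    matchMode: "PREFIX", // PREFIX | SUFFIX | CONTAINS | EXCEPT | NUMBER | IP
    value: "error",      // value to match
    type: "REGULAR",
};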
CkafkaDatahubTaskTransformsParamRowParam, CkafkaDatahubTaskTransformsParamRowParamArgs
- RowContent string
- Row content type: KEY_VALUE or VALUE.
- EntryDelimiter string
- Entry delimiter.
- KeyValueDelimiter string
- Key-value delimiter.
- RowContent string
- Row content type: KEY_VALUE or VALUE.
- EntryDelimiter string
- Entry delimiter.
- KeyValueDelimiter string
- Key-value delimiter.
- rowContent String
- Row content type: KEY_VALUE or VALUE.
- entryDelimiter String
- Entry delimiter.
- keyValueDelimiter String
- Key-value delimiter.
- rowContent string
- Row content type: KEY_VALUE or VALUE.
- entryDelimiter string
- Entry delimiter.
- keyValueDelimiter string
- Key-value delimiter.
- row_content str
- Row content type: KEY_VALUE or VALUE.
- entry_delimiter str
- Entry delimiter.
- key_value_delimiter str
- Key-value delimiter.
- rowContent String
- Row content type: KEY_VALUE or VALUE.
- entryDelimiter String
- Entry delimiter.
- keyValueDelimiter String
- Key-value delimiter.
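A minimal TypeScript sketch of a row-format configuration (the delimiters are placeholders):
const rowParam = {
    rowContent: "KEY_VALUE", // KEY_VALUE or VALUE
    entryDelimiter: "|",     // delimiter between entries
    keyValueDelimiter: ":",  // delimiter between key and value
};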
Import
A ckafka datahub_task can be imported using its ID, e.g.
$ pulumi import tencentcloud:index/ckafkaDatahubTask:CkafkaDatahubTask datahub_task datahub_task_id
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- tencentcloud tencentcloudstack/terraform-provider-tencentcloud
- License
- Notes
- This Pulumi package is based on the tencentcloud Terraform Provider.