published on Friday, Mar 6, 2026 by tencentcloudstack
Provides a resource to create a CLS data transform
Example Usage
import * as pulumi from "@pulumi/pulumi";
import * as tencentcloud from "@pulumi/tencentcloud";
const logsetSrc = new tencentcloud.ClsLogset("logset_src", {
logsetName: "tf-example-src",
tags: {
createdBy: "terraform",
},
});
const topicSrc = new tencentcloud.ClsTopic("topic_src", {
topicName: "tf-example_src",
logsetId: logsetSrc.clsLogsetId,
autoSplit: false,
maxSplitPartitions: 20,
partitionCount: 1,
period: 10,
storageType: "hot",
tags: {
createdBy: "terraform",
},
});
const logsetDst = new tencentcloud.ClsLogset("logset_dst", {
logsetName: "tf-example-dst",
tags: {
createdBy: "terraform",
},
});
const topicDst = new tencentcloud.ClsTopic("topic_dst", {
topicName: "tf-example-dst",
logsetId: logsetDst.clsLogsetId,
autoSplit: false,
maxSplitPartitions: 20,
partitionCount: 1,
period: 10,
storageType: "hot",
tags: {
createdBy: "terraform",
},
});
const example = new tencentcloud.ClsDataTransform("example", {
funcType: 1,
srcTopicId: topicSrc.clsTopicId,
name: "tf-example",
etlContent: "ext_sep(\"content\", \"f1, f2, f3\", sep=\",\", quote=\"\", restrict=False, mode=\"overwrite\")fields_drop(\"content\")",
taskType: 3,
enableFlag: 1,
dstResources: [{
topicId: topicDst.clsTopicId,
alias: "iac-test-dst",
}],
});
import pulumi
import pulumi_tencentcloud as tencentcloud
logset_src = tencentcloud.ClsLogset("logset_src",
logset_name="tf-example-src",
tags={
"createdBy": "terraform",
})
topic_src = tencentcloud.ClsTopic("topic_src",
topic_name="tf-example_src",
logset_id=logset_src.cls_logset_id,
auto_split=False,
max_split_partitions=20,
partition_count=1,
period=10,
storage_type="hot",
tags={
"createdBy": "terraform",
})
logset_dst = tencentcloud.ClsLogset("logset_dst",
logset_name="tf-example-dst",
tags={
"createdBy": "terraform",
})
topic_dst = tencentcloud.ClsTopic("topic_dst",
topic_name="tf-example-dst",
logset_id=logset_dst.cls_logset_id,
auto_split=False,
max_split_partitions=20,
partition_count=1,
period=10,
storage_type="hot",
tags={
"createdBy": "terraform",
})
example = tencentcloud.ClsDataTransform("example",
func_type=1,
src_topic_id=topic_src.cls_topic_id,
name="tf-example",
etl_content="ext_sep(\"content\", \"f1, f2, f3\", sep=\",\", quote=\"\", restrict=False, mode=\"overwrite\")fields_drop(\"content\")",
task_type=3,
enable_flag=1,
dst_resources=[{
"topic_id": topic_dst.cls_topic_id,
"alias": "iac-test-dst",
}])
package main
import (
"github.com/pulumi/pulumi-terraform-provider/sdks/go/tencentcloud/tencentcloud"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
logsetSrc, err := tencentcloud.NewClsLogset(ctx, "logset_src", &tencentcloud.ClsLogsetArgs{
LogsetName: pulumi.String("tf-example-src"),
Tags: pulumi.StringMap{
"createdBy": pulumi.String("terraform"),
},
})
if err != nil {
return err
}
topicSrc, err := tencentcloud.NewClsTopic(ctx, "topic_src", &tencentcloud.ClsTopicArgs{
TopicName: pulumi.String("tf-example_src"),
LogsetId: logsetSrc.ClsLogsetId,
AutoSplit: pulumi.Bool(false),
MaxSplitPartitions: pulumi.Float64(20),
PartitionCount: pulumi.Float64(1),
Period: pulumi.Float64(10),
StorageType: pulumi.String("hot"),
Tags: pulumi.StringMap{
"createdBy": pulumi.String("terraform"),
},
})
if err != nil {
return err
}
logsetDst, err := tencentcloud.NewClsLogset(ctx, "logset_dst", &tencentcloud.ClsLogsetArgs{
LogsetName: pulumi.String("tf-example-dst"),
Tags: pulumi.StringMap{
"createdBy": pulumi.String("terraform"),
},
})
if err != nil {
return err
}
topicDst, err := tencentcloud.NewClsTopic(ctx, "topic_dst", &tencentcloud.ClsTopicArgs{
TopicName: pulumi.String("tf-example-dst"),
LogsetId: logsetDst.ClsLogsetId,
AutoSplit: pulumi.Bool(false),
MaxSplitPartitions: pulumi.Float64(20),
PartitionCount: pulumi.Float64(1),
Period: pulumi.Float64(10),
StorageType: pulumi.String("hot"),
Tags: pulumi.StringMap{
"createdBy": pulumi.String("terraform"),
},
})
if err != nil {
return err
}
_, err = tencentcloud.NewClsDataTransform(ctx, "example", &tencentcloud.ClsDataTransformArgs{
FuncType: pulumi.Float64(1),
SrcTopicId: topicSrc.ClsTopicId,
Name: pulumi.String("tf-example"),
EtlContent: pulumi.String("ext_sep(\"content\", \"f1, f2, f3\", sep=\",\", quote=\"\", restrict=False, mode=\"overwrite\")fields_drop(\"content\")"),
TaskType: pulumi.Float64(3),
EnableFlag: pulumi.Float64(1),
DstResources: tencentcloud.ClsDataTransformDstResourceArray{
&tencentcloud.ClsDataTransformDstResourceArgs{
TopicId: topicDst.ClsTopicId,
Alias: pulumi.String("iac-test-dst"),
},
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Tencentcloud = Pulumi.Tencentcloud;
return await Deployment.RunAsync(() =>
{
var logsetSrc = new Tencentcloud.ClsLogset("logset_src", new()
{
LogsetName = "tf-example-src",
Tags =
{
{ "createdBy", "terraform" },
},
});
var topicSrc = new Tencentcloud.ClsTopic("topic_src", new()
{
TopicName = "tf-example_src",
LogsetId = logsetSrc.ClsLogsetId,
AutoSplit = false,
MaxSplitPartitions = 20,
PartitionCount = 1,
Period = 10,
StorageType = "hot",
Tags =
{
{ "createdBy", "terraform" },
},
});
var logsetDst = new Tencentcloud.ClsLogset("logset_dst", new()
{
LogsetName = "tf-example-dst",
Tags =
{
{ "createdBy", "terraform" },
},
});
var topicDst = new Tencentcloud.ClsTopic("topic_dst", new()
{
TopicName = "tf-example-dst",
LogsetId = logsetDst.ClsLogsetId,
AutoSplit = false,
MaxSplitPartitions = 20,
PartitionCount = 1,
Period = 10,
StorageType = "hot",
Tags =
{
{ "createdBy", "terraform" },
},
});
var example = new Tencentcloud.ClsDataTransform("example", new()
{
FuncType = 1,
SrcTopicId = topicSrc.ClsTopicId,
Name = "tf-example",
EtlContent = "ext_sep(\"content\", \"f1, f2, f3\", sep=\",\", quote=\"\", restrict=False, mode=\"overwrite\")fields_drop(\"content\")",
TaskType = 3,
EnableFlag = 1,
DstResources = new[]
{
new Tencentcloud.Inputs.ClsDataTransformDstResourceArgs
{
TopicId = topicDst.ClsTopicId,
Alias = "iac-test-dst",
},
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.tencentcloud.ClsLogset;
import com.pulumi.tencentcloud.ClsLogsetArgs;
import com.pulumi.tencentcloud.ClsTopic;
import com.pulumi.tencentcloud.ClsTopicArgs;
import com.pulumi.tencentcloud.ClsDataTransform;
import com.pulumi.tencentcloud.ClsDataTransformArgs;
import com.pulumi.tencentcloud.inputs.ClsDataTransformDstResourceArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var logsetSrc = new ClsLogset("logsetSrc", ClsLogsetArgs.builder()
.logsetName("tf-example-src")
.tags(Map.of("createdBy", "terraform"))
.build());
var topicSrc = new ClsTopic("topicSrc", ClsTopicArgs.builder()
.topicName("tf-example_src")
.logsetId(logsetSrc.clsLogsetId())
.autoSplit(false)
.maxSplitPartitions(20.0)
.partitionCount(1.0)
.period(10.0)
.storageType("hot")
.tags(Map.of("createdBy", "terraform"))
.build());
var logsetDst = new ClsLogset("logsetDst", ClsLogsetArgs.builder()
.logsetName("tf-example-dst")
.tags(Map.of("createdBy", "terraform"))
.build());
var topicDst = new ClsTopic("topicDst", ClsTopicArgs.builder()
.topicName("tf-example-dst")
.logsetId(logsetDst.clsLogsetId())
.autoSplit(false)
.maxSplitPartitions(20.0)
.partitionCount(1.0)
.period(10.0)
.storageType("hot")
.tags(Map.of("createdBy", "terraform"))
.build());
var example = new ClsDataTransform("example", ClsDataTransformArgs.builder()
.funcType(1.0)
.srcTopicId(topicSrc.clsTopicId())
.name("tf-example")
.etlContent("ext_sep(\"content\", \"f1, f2, f3\", sep=\",\", quote=\"\", restrict=False, mode=\"overwrite\")fields_drop(\"content\")")
.taskType(3.0)
.enableFlag(1.0)
.dstResources(ClsDataTransformDstResourceArgs.builder()
.topicId(topicDst.clsTopicId())
.alias("iac-test-dst")
.build())
.build());
}
}
resources:
logsetSrc:
type: tencentcloud:ClsLogset
name: logset_src
properties:
logsetName: tf-example-src
tags:
createdBy: terraform
topicSrc:
type: tencentcloud:ClsTopic
name: topic_src
properties:
topicName: tf-example_src
logsetId: ${logsetSrc.clsLogsetId}
autoSplit: false
maxSplitPartitions: 20
partitionCount: 1
period: 10
storageType: hot
tags:
createdBy: terraform
logsetDst:
type: tencentcloud:ClsLogset
name: logset_dst
properties:
logsetName: tf-example-dst
tags:
createdBy: terraform
topicDst:
type: tencentcloud:ClsTopic
name: topic_dst
properties:
topicName: tf-example-dst
logsetId: ${logsetDst.clsLogsetId}
autoSplit: false
maxSplitPartitions: 20
partitionCount: 1
period: 10
storageType: hot
tags:
createdBy: terraform
example:
type: tencentcloud:ClsDataTransform
properties:
funcType: 1
srcTopicId: ${topicSrc.clsTopicId}
name: tf-example
etlContent: ext_sep("content", "f1, f2, f3", sep=",", quote="", restrict=False, mode="overwrite")fields_drop("content")
taskType: 3
enableFlag: 1
dstResources:
- topicId: ${topicDst.clsTopicId}
alias: iac-test-dst
Create ClsDataTransform Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new ClsDataTransform(name: string, args: ClsDataTransformArgs, opts?: CustomResourceOptions);@overload
def ClsDataTransform(resource_name: str,
args: ClsDataTransformArgs,
opts: Optional[ResourceOptions] = None)
@overload
def ClsDataTransform(resource_name: str,
opts: Optional[ResourceOptions] = None,
etl_content: Optional[str] = None,
task_type: Optional[float] = None,
src_topic_id: Optional[str] = None,
func_type: Optional[float] = None,
failure_log_key: Optional[str] = None,
enable_flag: Optional[float] = None,
env_infos: Optional[Sequence[ClsDataTransformEnvInfoArgs]] = None,
dst_resources: Optional[Sequence[ClsDataTransformDstResourceArgs]] = None,
backup_give_up_data: Optional[bool] = None,
data_transform_type: Optional[float] = None,
has_services_log: Optional[float] = None,
keep_failure_log: Optional[float] = None,
name: Optional[str] = None,
process_from_timestamp: Optional[float] = None,
process_to_timestamp: Optional[float] = None,
data_transform_sql_data_sources: Optional[Sequence[ClsDataTransformDataTransformSqlDataSourceArgs]] = None,
cls_data_transform_id: Optional[str] = None)func NewClsDataTransform(ctx *Context, name string, args ClsDataTransformArgs, opts ...ResourceOption) (*ClsDataTransform, error)public ClsDataTransform(string name, ClsDataTransformArgs args, CustomResourceOptions? opts = null)
public ClsDataTransform(String name, ClsDataTransformArgs args)
public ClsDataTransform(String name, ClsDataTransformArgs args, CustomResourceOptions options)
type: tencentcloud:ClsDataTransform
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args ClsDataTransformArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args ClsDataTransformArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args ClsDataTransformArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args ClsDataTransformArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args ClsDataTransformArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
ClsDataTransform Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The ClsDataTransform resource accepts the following input properties:
- Etl
Content string - Data transform content. If
func_typeis2, must uselog_auto_output. - Func
Type double - Task type.
1: Specify the theme;2: Dynamic creation. - Src
Topic stringId - Source topic ID.
- Task
Type double - Task type.
1: Use random data from the source log theme for processing preview;2: Use user-defined test data for processing preview;3: Create real processing tasks. - Backup
Give boolUp Data - When
func_typeis2, whether to discard data when the number of dynamically created logsets and topics exceeds the product specification limit. Default isfalse.false: Create backup logset and topic and write logs to the backup topic;true: Discard log data. - Cls
Data stringTransform Id - ID of the resource.
- Data
Transform List<ClsSql Data Sources Data Transform Data Transform Sql Data Source> - Associated data source information.
- Data
Transform doubleType - Data transform type.
0: Standard data transform task;1: Pre-processing data transform task (process collected logs before writing to the log topic). - Dst
Resources List<ClsData Transform Dst Resource> - Data transform destination resources. If
func_type is 1, this parameter is required. If func_type is 2, this parameter does not need to be filled in. - Enable
Flag double - Task enable flag.
1: enable,2: disable, Default is1. - Env
Infos List<ClsData Transform Env Info> - Set environment variables.
- Failure
Log stringKey - Field name for failure logs.
- Has
Services doubleLog - Whether to enable service log delivery.
1: disable;2: enable. - Keep
Failure doubleLog - Keep failure log status.
1: do not keep (default);2: keep. - Name string
- Task name.
- Process
From doubleTimestamp - Specify the start time of processing data, in seconds-level timestamp. Any time range within the log topic lifecycle. If it exceeds the lifecycle, only the part with data within the lifecycle is processed.
- Process
To doubleTimestamp - Specify the end time of processing data, in seconds-level timestamp. Cannot specify a future time. If not filled, it means continuous execution.
- Etl
Content string - Data transform content. If
func_typeis2, must uselog_auto_output. - Func
Type float64 - Task type.
1: Specify the theme;2: Dynamic creation. - Src
Topic stringId - Source topic ID.
- Task
Type float64 - Task type.
1: Use random data from the source log theme for processing preview;2: Use user-defined test data for processing preview;3: Create real processing tasks. - Backup
Give boolUp Data - When
func_typeis2, whether to discard data when the number of dynamically created logsets and topics exceeds the product specification limit. Default isfalse.false: Create backup logset and topic and write logs to the backup topic;true: Discard log data. - Cls
Data stringTransform Id - ID of the resource.
- Data
Transform []ClsSql Data Sources Data Transform Data Transform Sql Data Source Args - Associated data source information.
- Data
Transform float64Type - Data transform type.
0: Standard data transform task;1: Pre-processing data transform task (process collected logs before writing to the log topic). - Dst
Resources []ClsData Transform Dst Resource Args - Data transform destination resources. If
func_type is 1, this parameter is required. If func_type is 2, this parameter does not need to be filled in. - Enable
Flag float64 - Task enable flag.
1: enable,2: disable, Default is1. - Env
Infos []ClsData Transform Env Info Args - Set environment variables.
- Failure
Log stringKey - Field name for failure logs.
- Has
Services float64Log - Whether to enable service log delivery.
1: disable;2: enable. - Keep
Failure float64Log - Keep failure log status.
1: do not keep (default);2: keep. - Name string
- Task name.
- Process
From float64Timestamp - Specify the start time of processing data, in seconds-level timestamp. Any time range within the log topic lifecycle. If it exceeds the lifecycle, only the part with data within the lifecycle is processed.
- Process
To float64Timestamp - Specify the end time of processing data, in seconds-level timestamp. Cannot specify a future time. If not filled, it means continuous execution.
- etl
Content String - Data transform content. If
func_typeis2, must uselog_auto_output. - func
Type Double - Task type.
1: Specify the theme;2: Dynamic creation. - src
Topic StringId - Source topic ID.
- task
Type Double - Task type.
1: Use random data from the source log theme for processing preview;2: Use user-defined test data for processing preview;3: Create real processing tasks. - backup
Give BooleanUp Data - When
func_typeis2, whether to discard data when the number of dynamically created logsets and topics exceeds the product specification limit. Default isfalse.false: Create backup logset and topic and write logs to the backup topic;true: Discard log data. - cls
Data StringTransform Id - ID of the resource.
- data
Transform List<ClsSql Data Sources Data Transform Data Transform Sql Data Source> - Associated data source information.
- data
Transform DoubleType - Data transform type.
0: Standard data transform task;1: Pre-processing data transform task (process collected logs before writing to the log topic). - dst
Resources List<ClsData Transform Dst Resource> - Data transform destination resources. If
func_type is 1, this parameter is required. If func_type is 2, this parameter does not need to be filled in. - enable
Flag Double - Task enable flag.
1: enable,2: disable, Default is1. - env
Infos List<ClsData Transform Env Info> - Set environment variables.
- failure
Log StringKey - Field name for failure logs.
- has
Services DoubleLog - Whether to enable service log delivery.
1: disable;2: enable. - keep
Failure DoubleLog - Keep failure log status.
1: do not keep (default);2: keep. - name String
- Task name.
- process
From DoubleTimestamp - Specify the start time of processing data, in seconds-level timestamp. Any time range within the log topic lifecycle. If it exceeds the lifecycle, only the part with data within the lifecycle is processed.
- process
To DoubleTimestamp - Specify the end time of processing data, in seconds-level timestamp. Cannot specify a future time. If not filled, it means continuous execution.
- etl
Content string - Data transform content. If
func_typeis2, must uselog_auto_output. - func
Type number - Task type.
1: Specify the theme;2: Dynamic creation. - src
Topic stringId - Source topic ID.
- task
Type number - Task type.
1: Use random data from the source log theme for processing preview;2: Use user-defined test data for processing preview;3: Create real processing tasks. - backup
Give booleanUp Data - When
func_typeis2, whether to discard data when the number of dynamically created logsets and topics exceeds the product specification limit. Default isfalse.false: Create backup logset and topic and write logs to the backup topic;true: Discard log data. - cls
Data stringTransform Id - ID of the resource.
- data
Transform ClsSql Data Sources Data Transform Data Transform Sql Data Source[] - Associated data source information.
- data
Transform numberType - Data transform type.
0: Standard data transform task;1: Pre-processing data transform task (process collected logs before writing to the log topic). - dst
Resources ClsData Transform Dst Resource[] - Data transform destination resources. If
func_type is 1, this parameter is required. If func_type is 2, this parameter does not need to be filled in. - enable
Flag number - Task enable flag.
1: enable,2: disable, Default is1. - env
Infos ClsData Transform Env Info[] - Set environment variables.
- failure
Log stringKey - Field name for failure logs.
- has
Services numberLog - Whether to enable service log delivery.
1: disable;2: enable. - keep
Failure numberLog - Keep failure log status.
1: do not keep (default);2: keep. - name string
- Task name.
- process
From numberTimestamp - Specify the start time of processing data, in seconds-level timestamp. Any time range within the log topic lifecycle. If it exceeds the lifecycle, only the part with data within the lifecycle is processed.
- process
To numberTimestamp - Specify the end time of processing data, in seconds-level timestamp. Cannot specify a future time. If not filled, it means continuous execution.
- etl_
content str - Data transform content. If
func_typeis2, must uselog_auto_output. - func_
type float - Task type.
1: Specify the theme;2: Dynamic creation. - src_
topic_ strid - Source topic ID.
- task_
type float - Task type.
1: Use random data from the source log theme for processing preview;2: Use user-defined test data for processing preview;3: Create real processing tasks. - backup_
give_ boolup_ data - When
func_typeis2, whether to discard data when the number of dynamically created logsets and topics exceeds the product specification limit. Default isfalse.false: Create backup logset and topic and write logs to the backup topic;true: Discard log data. - cls_
data_ strtransform_ id - ID of the resource.
- data_
transform_ Sequence[Clssql_ data_ sources Data Transform Data Transform Sql Data Source Args] - Associated data source information.
- data_
transform_ floattype - Data transform type.
0: Standard data transform task;1: Pre-processing data transform task (process collected logs before writing to the log topic). - dst_
resources Sequence[ClsData Transform Dst Resource Args] - Data transform destination resources. If
func_type is 1, this parameter is required. If func_type is 2, this parameter does not need to be filled in. - enable_
flag float - Task enable flag.
1: enable,2: disable, Default is1. - env_
infos Sequence[ClsData Transform Env Info Args] - Set environment variables.
- failure_
log_ strkey - Field name for failure logs.
- has_
services_ floatlog - Whether to enable service log delivery.
1: disable;2: enable. - keep_
failure_ floatlog - Keep failure log status.
1: do not keep (default);2: keep. - name str
- Task name.
- process_
from_ floattimestamp - Specify the start time of processing data, in seconds-level timestamp. Any time range within the log topic lifecycle. If it exceeds the lifecycle, only the part with data within the lifecycle is processed.
- process_
to_ floattimestamp - Specify the end time of processing data, in seconds-level timestamp. Cannot specify a future time. If not filled, it means continuous execution.
- etl
Content String - Data transform content. If
func_typeis2, must uselog_auto_output. - func
Type Number - Task type.
1: Specify the theme;2: Dynamic creation. - src
Topic StringId - Source topic ID.
- task
Type Number - Task type.
1: Use random data from the source log theme for processing preview;2: Use user-defined test data for processing preview;3: Create real processing tasks. - backup
Give BooleanUp Data - When
func_typeis2, whether to discard data when the number of dynamically created logsets and topics exceeds the product specification limit. Default isfalse.false: Create backup logset and topic and write logs to the backup topic;true: Discard log data. - cls
Data StringTransform Id - ID of the resource.
- data
Transform List<Property Map>Sql Data Sources - Associated data source information.
- data
Transform NumberType - Data transform type.
0: Standard data transform task;1: Pre-processing data transform task (process collected logs before writing to the log topic). - dst
Resources List<Property Map> - Data transform destination resources. If
func_type is 1, this parameter is required. If func_type is 2, this parameter does not need to be filled in. - enable
Flag Number - Task enable flag.
1: enable,2: disable, Default is1. - env
Infos List<Property Map> - Set environment variables.
- failure
Log StringKey - Field name for failure logs.
- has
Services NumberLog - Whether to enable service log delivery.
1: disable;2: enable. - keep
Failure NumberLog - Keep failure log status.
1: do not keep (default);2: keep. - name String
- Task name.
- process
From NumberTimestamp - Specify the start time of processing data, in seconds-level timestamp. Any time range within the log topic lifecycle. If it exceeds the lifecycle, only the part with data within the lifecycle is processed.
- process
To NumberTimestamp - Specify the end time of processing data, in seconds-level timestamp. Cannot specify a future time. If not filled, it means continuous execution.
Outputs
All input properties are implicitly available as output properties. Additionally, the ClsDataTransform resource produces the following output properties:
- Id string
- The provider-assigned unique ID for this managed resource.
- Id string
- The provider-assigned unique ID for this managed resource.
- id String
- The provider-assigned unique ID for this managed resource.
- id string
- The provider-assigned unique ID for this managed resource.
- id str
- The provider-assigned unique ID for this managed resource.
- id String
- The provider-assigned unique ID for this managed resource.
Look up Existing ClsDataTransform Resource
Get an existing ClsDataTransform resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: ClsDataTransformState, opts?: CustomResourceOptions): ClsDataTransform@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
backup_give_up_data: Optional[bool] = None,
cls_data_transform_id: Optional[str] = None,
data_transform_sql_data_sources: Optional[Sequence[ClsDataTransformDataTransformSqlDataSourceArgs]] = None,
data_transform_type: Optional[float] = None,
dst_resources: Optional[Sequence[ClsDataTransformDstResourceArgs]] = None,
enable_flag: Optional[float] = None,
env_infos: Optional[Sequence[ClsDataTransformEnvInfoArgs]] = None,
etl_content: Optional[str] = None,
failure_log_key: Optional[str] = None,
func_type: Optional[float] = None,
has_services_log: Optional[float] = None,
keep_failure_log: Optional[float] = None,
name: Optional[str] = None,
process_from_timestamp: Optional[float] = None,
process_to_timestamp: Optional[float] = None,
src_topic_id: Optional[str] = None,
task_type: Optional[float] = None) -> ClsDataTransformfunc GetClsDataTransform(ctx *Context, name string, id IDInput, state *ClsDataTransformState, opts ...ResourceOption) (*ClsDataTransform, error)public static ClsDataTransform Get(string name, Input<string> id, ClsDataTransformState? state, CustomResourceOptions? opts = null)public static ClsDataTransform get(String name, Output<String> id, ClsDataTransformState state, CustomResourceOptions options)resources: _: type: tencentcloud:ClsDataTransform get: id: ${id}- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- Backup
Give boolUp Data - When
func_typeis2, whether to discard data when the number of dynamically created logsets and topics exceeds the product specification limit. Default isfalse.false: Create backup logset and topic and write logs to the backup topic;true: Discard log data. - Cls
Data stringTransform Id - ID of the resource.
- Data
Transform List<ClsSql Data Sources Data Transform Data Transform Sql Data Source> - Associated data source information.
- Data
Transform doubleType - Data transform type.
0: Standard data transform task;1: Pre-processing data transform task (process collected logs before writing to the log topic). - Dst
Resources List<ClsData Transform Dst Resource> - Data transform destination resources. If
func_type is 1, this parameter is required. If func_type is 2, this parameter does not need to be filled in. - Enable
Flag double - Task enable flag.
1: enable,2: disable, Default is1. - Env
Infos List<ClsData Transform Env Info> - Set environment variables.
- Etl
Content string - Data transform content. If
func_typeis2, must uselog_auto_output. - Failure
Log stringKey - Field name for failure logs.
- Func
Type double - Task type.
1: Specify the theme;2: Dynamic creation. - Has
Services doubleLog - Whether to enable service log delivery.
1: disable;2: enable. - Keep
Failure doubleLog - Keep failure log status.
1: do not keep (default);2: keep. - Name string
- Task name.
- Process
From doubleTimestamp - Specify the start time of processing data, in seconds-level timestamp. Any time range within the log topic lifecycle. If it exceeds the lifecycle, only the part with data within the lifecycle is processed.
- Process
To doubleTimestamp - Specify the end time of processing data, in seconds-level timestamp. Cannot specify a future time. If not filled, it means continuous execution.
- Src
Topic stringId - Source topic ID.
- Task
Type double - Task type.
1: Use random data from the source log theme for processing preview;2: Use user-defined test data for processing preview;3: Create real processing tasks.
- Backup
Give boolUp Data - When
func_typeis2, whether to discard data when the number of dynamically created logsets and topics exceeds the product specification limit. Default isfalse.false: Create backup logset and topic and write logs to the backup topic;true: Discard log data. - Cls
Data stringTransform Id - ID of the resource.
- Data
Transform []ClsSql Data Sources Data Transform Data Transform Sql Data Source Args - Associated data source information.
- Data
Transform float64Type - Data transform type.
0: Standard data transform task;1: Pre-processing data transform task (process collected logs before writing to the log topic). - Dst
Resources []ClsData Transform Dst Resource Args - Data transform destination resources. If
func_type is 1, this parameter is required. If func_type is 2, this parameter does not need to be filled in. - Enable
Flag float64 - Task enable flag.
1: enable,2: disable, Default is1. - Env
Infos []ClsData Transform Env Info Args - Set environment variables.
- Etl
Content string - Data transform content. If
func_typeis2, must uselog_auto_output. - Failure
Log stringKey - Field name for failure logs.
- Func
Type float64 - Task type.
1: Specify the theme;2: Dynamic creation. - Has
Services float64Log - Whether to enable service log delivery.
1: disable;2: enable. - Keep
Failure float64Log - Keep failure log status.
1: do not keep (default);2: keep. - Name string
- Task name.
- Process
From float64Timestamp - Specify the start time of processing data, in seconds-level timestamp. Any time range within the log topic lifecycle. If it exceeds the lifecycle, only the part with data within the lifecycle is processed.
- Process
To float64Timestamp - Specify the end time of processing data, in seconds-level timestamp. Cannot specify a future time. If not filled, it means continuous execution.
- Src
Topic stringId - Source topic ID.
- Task
Type float64 - Task type.
1: Use random data from the source log theme for processing preview;2: Use user-defined test data for processing preview;3: Create real processing tasks.
- backupGiveUpData Boolean - When `func_type` is `2`, whether to discard data when the number of dynamically created logsets and topics exceeds the product specification limit. Default is `false`. `false`: Create backup logset and topic and write logs to the backup topic; `true`: Discard log data.
- clsDataTransformId String - ID of the resource.
- dataTransformSqlDataSources List<ClsDataTransformDataTransformSqlDataSource> - Associated data source information.
- dataTransformType Double - Data transform type. `0`: Standard data transform task; `1`: Pre-processing data transform task (process collected logs before writing to the log topic).
- dstResources List<ClsDataTransformDstResource> - Data transform des resources. If `func_type` is `1`, this parameter is required. If `func_type` is `2`, this parameter does not need to be filled in.
- enableFlag Double - Task enable flag. `1`: enable, `2`: disable. Default is `1`.
- envInfos List<ClsDataTransformEnvInfo> - Set environment variables.
- etlContent String - Data transform content. If `func_type` is `2`, must use `log_auto_output`.
- failureLogKey String - Field name for failure logs.
- funcType Double - Task type. `1`: Specify the theme; `2`: Dynamic creation.
- hasServicesLog Double - Whether to enable service log delivery. `1`: disable; `2`: enable.
- keepFailureLog Double - Keep failure log status. `1`: do not keep (default); `2`: keep.
- name String - Task name.
- processFromTimestamp Double - Specify the start time of processing data, in seconds-level timestamp. Any time range within the log topic lifecycle. If it exceeds the lifecycle, only the part with data within the lifecycle is processed.
- processToTimestamp Double - Specify the end time of processing data, in seconds-level timestamp. Cannot specify a future time. If not filled, it means continuous execution.
- srcTopicId String - Source topic ID.
- taskType Double - Task type. `1`: Use random data from the source log theme for processing preview; `2`: Use user-defined test data for processing preview; `3`: Create real machining tasks.
- backupGiveUpData boolean - When `func_type` is `2`, whether to discard data when the number of dynamically created logsets and topics exceeds the product specification limit. Default is `false`. `false`: Create backup logset and topic and write logs to the backup topic; `true`: Discard log data.
- clsDataTransformId string - ID of the resource.
- dataTransformSqlDataSources ClsDataTransformDataTransformSqlDataSource[] - Associated data source information.
- dataTransformType number - Data transform type. `0`: Standard data transform task; `1`: Pre-processing data transform task (process collected logs before writing to the log topic).
- dstResources ClsDataTransformDstResource[] - Data transform des resources. If `func_type` is `1`, this parameter is required. If `func_type` is `2`, this parameter does not need to be filled in.
- enableFlag number - Task enable flag. `1`: enable, `2`: disable. Default is `1`.
- envInfos ClsDataTransformEnvInfo[] - Set environment variables.
- etlContent string - Data transform content. If `func_type` is `2`, must use `log_auto_output`.
- failureLogKey string - Field name for failure logs.
- funcType number - Task type. `1`: Specify the theme; `2`: Dynamic creation.
- hasServicesLog number - Whether to enable service log delivery. `1`: disable; `2`: enable.
- keepFailureLog number - Keep failure log status. `1`: do not keep (default); `2`: keep.
- name string - Task name.
- processFromTimestamp number - Specify the start time of processing data, in seconds-level timestamp. Any time range within the log topic lifecycle. If it exceeds the lifecycle, only the part with data within the lifecycle is processed.
- processToTimestamp number - Specify the end time of processing data, in seconds-level timestamp. Cannot specify a future time. If not filled, it means continuous execution.
- srcTopicId string - Source topic ID.
- taskType number - Task type. `1`: Use random data from the source log theme for processing preview; `2`: Use user-defined test data for processing preview; `3`: Create real machining tasks.
- backup_give_up_data bool - When `func_type` is `2`, whether to discard data when the number of dynamically created logsets and topics exceeds the product specification limit. Default is `false`. `false`: Create backup logset and topic and write logs to the backup topic; `true`: Discard log data.
- cls_data_transform_id str - ID of the resource.
- data_transform_sql_data_sources Sequence[ClsDataTransformDataTransformSqlDataSourceArgs] - Associated data source information.
- data_transform_type float - Data transform type. `0`: Standard data transform task; `1`: Pre-processing data transform task (process collected logs before writing to the log topic).
- dst_resources Sequence[ClsDataTransformDstResourceArgs] - Data transform des resources. If `func_type` is `1`, this parameter is required. If `func_type` is `2`, this parameter does not need to be filled in.
- enable_flag float - Task enable flag. `1`: enable, `2`: disable. Default is `1`.
- env_infos Sequence[ClsDataTransformEnvInfoArgs] - Set environment variables.
- etl_content str - Data transform content. If `func_type` is `2`, must use `log_auto_output`.
- failure_log_key str - Field name for failure logs.
- func_type float - Task type. `1`: Specify the theme; `2`: Dynamic creation.
- has_services_log float - Whether to enable service log delivery. `1`: disable; `2`: enable.
- keep_failure_log float - Keep failure log status. `1`: do not keep (default); `2`: keep.
- name str - Task name.
- process_from_timestamp float - Specify the start time of processing data, in seconds-level timestamp. Any time range within the log topic lifecycle. If it exceeds the lifecycle, only the part with data within the lifecycle is processed.
- process_to_timestamp float - Specify the end time of processing data, in seconds-level timestamp. Cannot specify a future time. If not filled, it means continuous execution.
- src_topic_id str - Source topic ID.
- task_type float - Task type. `1`: Use random data from the source log theme for processing preview; `2`: Use user-defined test data for processing preview; `3`: Create real machining tasks.
- backupGiveUpData Boolean - When `func_type` is `2`, whether to discard data when the number of dynamically created logsets and topics exceeds the product specification limit. Default is `false`. `false`: Create backup logset and topic and write logs to the backup topic; `true`: Discard log data.
- clsDataTransformId String - ID of the resource.
- dataTransformSqlDataSources List<Property Map> - Associated data source information.
- dataTransformType Number - Data transform type. `0`: Standard data transform task; `1`: Pre-processing data transform task (process collected logs before writing to the log topic).
- dstResources List<Property Map> - Data transform des resources. If `func_type` is `1`, this parameter is required. If `func_type` is `2`, this parameter does not need to be filled in.
- enableFlag Number - Task enable flag. `1`: enable, `2`: disable. Default is `1`.
- envInfos List<Property Map> - Set environment variables.
- etlContent String - Data transform content. If `func_type` is `2`, must use `log_auto_output`.
- failureLogKey String - Field name for failure logs.
- funcType Number - Task type. `1`: Specify the theme; `2`: Dynamic creation.
- hasServicesLog Number - Whether to enable service log delivery. `1`: disable; `2`: enable.
- keepFailureLog Number - Keep failure log status. `1`: do not keep (default); `2`: keep.
- name String - Task name.
- processFromTimestamp Number - Specify the start time of processing data, in seconds-level timestamp. Any time range within the log topic lifecycle. If it exceeds the lifecycle, only the part with data within the lifecycle is processed.
- processToTimestamp Number - Specify the end time of processing data, in seconds-level timestamp. Cannot specify a future time. If not filled, it means continuous execution.
- srcTopicId String - Source topic ID.
- taskType Number - Task type. `1`: Use random data from the source log theme for processing preview; `2`: Use user-defined test data for processing preview; `3`: Create real machining tasks.
Supporting Types
ClsDataTransformDataTransformSqlDataSource, ClsDataTransformDataTransformSqlDataSourceArgs
- AliasName string - Alias. Used in data transform statements.
- DataSource double - Data source type. `1`: MySQL; `2`: Self-built MySQL; `3`: PostgreSQL.
- InstanceId string - Instance ID. When DataSource is `1`, it represents the cloud database MySQL instance ID, such as: cdb-zxcvbnm.
- Password string - MySQL access password.
- Region string - InstanceId region. For example: ap-guangzhou.
- User string - MySQL access username.
- AliasName string - Alias. Used in data transform statements.
- DataSource float64 - Data source type. `1`: MySQL; `2`: Self-built MySQL; `3`: PostgreSQL.
- InstanceId string - Instance ID. When DataSource is `1`, it represents the cloud database MySQL instance ID, such as: cdb-zxcvbnm.
- Password string - MySQL access password.
- Region string - InstanceId region. For example: ap-guangzhou.
- User string - MySQL access username.
- aliasName String - Alias. Used in data transform statements.
- dataSource Double - Data source type. `1`: MySQL; `2`: Self-built MySQL; `3`: PostgreSQL.
- instanceId String - Instance ID. When DataSource is `1`, it represents the cloud database MySQL instance ID, such as: cdb-zxcvbnm.
- password String - MySQL access password.
- region String - InstanceId region. For example: ap-guangzhou.
- user String - MySQL access username.
- aliasName string - Alias. Used in data transform statements.
- dataSource number - Data source type. `1`: MySQL; `2`: Self-built MySQL; `3`: PostgreSQL.
- instanceId string - Instance ID. When DataSource is `1`, it represents the cloud database MySQL instance ID, such as: cdb-zxcvbnm.
- password string - MySQL access password.
- region string - InstanceId region. For example: ap-guangzhou.
- user string - MySQL access username.
- alias_name str - Alias. Used in data transform statements.
- data_source float - Data source type. `1`: MySQL; `2`: Self-built MySQL; `3`: PostgreSQL.
- instance_id str - Instance ID. When DataSource is `1`, it represents the cloud database MySQL instance ID, such as: cdb-zxcvbnm.
- password str - MySQL access password.
- region str - InstanceId region. For example: ap-guangzhou.
- user str - MySQL access username.
- aliasName String - Alias. Used in data transform statements.
- dataSource Number - Data source type. `1`: MySQL; `2`: Self-built MySQL; `3`: PostgreSQL.
- instanceId String - Instance ID. When DataSource is `1`, it represents the cloud database MySQL instance ID, such as: cdb-zxcvbnm.
- password String - MySQL access password.
- region String - InstanceId region. For example: ap-guangzhou.
- user String - MySQL access username.
ClsDataTransformDstResource, ClsDataTransformDstResourceArgs
ClsDataTransformEnvInfo, ClsDataTransformEnvInfoArgs
Import
CLS data transform can be imported using the id, e.g.
$ pulumi import tencentcloud:index/clsDataTransform:ClsDataTransform example 7b4bcb05-9154-4cdc-a479-f6b5743846e5
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- tencentcloud tencentcloudstack/terraform-provider-tencentcloud
- License
- Notes
- This Pulumi package is based on the
tencentcloudTerraform Provider.
published on Friday, Mar 6, 2026 by tencentcloudstack
