1. Packages
  2. Tencentcloud Provider
  3. API Docs
  4. ClsDataTransform
Viewing docs for tencentcloud 1.82.73
published on Friday, Mar 6, 2026 by tencentcloudstack
tencentcloud logo
Viewing docs for tencentcloud 1.82.73
published on Friday, Mar 6, 2026 by tencentcloudstack

    Provides a resource to create a CLS data transform

    Example Usage

    import * as pulumi from "@pulumi/pulumi";
    import * as tencentcloud from "@pulumi/tencentcloud";
    
    const logsetSrc = new tencentcloud.ClsLogset("logset_src", {
        logsetName: "tf-example-src",
        tags: {
            createdBy: "terraform",
        },
    });
    const topicSrc = new tencentcloud.ClsTopic("topic_src", {
        topicName: "tf-example_src",
        logsetId: logsetSrc.clsLogsetId,
        autoSplit: false,
        maxSplitPartitions: 20,
        partitionCount: 1,
        period: 10,
        storageType: "hot",
        tags: {
            createdBy: "terraform",
        },
    });
    const logsetDst = new tencentcloud.ClsLogset("logset_dst", {
        logsetName: "tf-example-dst",
        tags: {
            createdBy: "terraform",
        },
    });
    const topicDst = new tencentcloud.ClsTopic("topic_dst", {
        topicName: "tf-example-dst",
        logsetId: logsetDst.clsLogsetId,
        autoSplit: false,
        maxSplitPartitions: 20,
        partitionCount: 1,
        period: 10,
        storageType: "hot",
        tags: {
            createdBy: "terraform",
        },
    });
    const example = new tencentcloud.ClsDataTransform("example", {
        funcType: 1,
        srcTopicId: topicSrc.clsTopicId,
        name: "tf-example",
        etlContent: "ext_sep(\"content\", \"f1, f2, f3\", sep=\",\", quote=\"\", restrict=False, mode=\"overwrite\")fields_drop(\"content\")",
        taskType: 3,
        enableFlag: 1,
        dstResources: [{
            topicId: topicDst.clsTopicId,
            alias: "iac-test-dst",
        }],
    });
    
    import pulumi
    import pulumi_tencentcloud as tencentcloud
    
    logset_src = tencentcloud.ClsLogset("logset_src",
        logset_name="tf-example-src",
        tags={
            "createdBy": "terraform",
        })
    topic_src = tencentcloud.ClsTopic("topic_src",
        topic_name="tf-example_src",
        logset_id=logset_src.cls_logset_id,
        auto_split=False,
        max_split_partitions=20,
        partition_count=1,
        period=10,
        storage_type="hot",
        tags={
            "createdBy": "terraform",
        })
    logset_dst = tencentcloud.ClsLogset("logset_dst",
        logset_name="tf-example-dst",
        tags={
            "createdBy": "terraform",
        })
    topic_dst = tencentcloud.ClsTopic("topic_dst",
        topic_name="tf-example-dst",
        logset_id=logset_dst.cls_logset_id,
        auto_split=False,
        max_split_partitions=20,
        partition_count=1,
        period=10,
        storage_type="hot",
        tags={
            "createdBy": "terraform",
        })
    example = tencentcloud.ClsDataTransform("example",
        func_type=1,
        src_topic_id=topic_src.cls_topic_id,
        name="tf-example",
        etl_content="ext_sep(\"content\", \"f1, f2, f3\", sep=\",\", quote=\"\", restrict=False, mode=\"overwrite\")fields_drop(\"content\")",
        task_type=3,
        enable_flag=1,
        dst_resources=[{
            "topic_id": topic_dst.cls_topic_id,
            "alias": "iac-test-dst",
        }])
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-terraform-provider/sdks/go/tencentcloud/tencentcloud"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		logsetSrc, err := tencentcloud.NewClsLogset(ctx, "logset_src", &tencentcloud.ClsLogsetArgs{
    			LogsetName: pulumi.String("tf-example-src"),
    			Tags: pulumi.StringMap{
    				"createdBy": pulumi.String("terraform"),
    			},
    		})
    		if err != nil {
    			return err
    		}
    		topicSrc, err := tencentcloud.NewClsTopic(ctx, "topic_src", &tencentcloud.ClsTopicArgs{
    			TopicName:          pulumi.String("tf-example_src"),
    			LogsetId:           logsetSrc.ClsLogsetId,
    			AutoSplit:          pulumi.Bool(false),
    			MaxSplitPartitions: pulumi.Float64(20),
    			PartitionCount:     pulumi.Float64(1),
    			Period:             pulumi.Float64(10),
    			StorageType:        pulumi.String("hot"),
    			Tags: pulumi.StringMap{
    				"createdBy": pulumi.String("terraform"),
    			},
    		})
    		if err != nil {
    			return err
    		}
    		logsetDst, err := tencentcloud.NewClsLogset(ctx, "logset_dst", &tencentcloud.ClsLogsetArgs{
    			LogsetName: pulumi.String("tf-example-dst"),
    			Tags: pulumi.StringMap{
    				"createdBy": pulumi.String("terraform"),
    			},
    		})
    		if err != nil {
    			return err
    		}
    		topicDst, err := tencentcloud.NewClsTopic(ctx, "topic_dst", &tencentcloud.ClsTopicArgs{
    			TopicName:          pulumi.String("tf-example-dst"),
    			LogsetId:           logsetDst.ClsLogsetId,
    			AutoSplit:          pulumi.Bool(false),
    			MaxSplitPartitions: pulumi.Float64(20),
    			PartitionCount:     pulumi.Float64(1),
    			Period:             pulumi.Float64(10),
    			StorageType:        pulumi.String("hot"),
    			Tags: pulumi.StringMap{
    				"createdBy": pulumi.String("terraform"),
    			},
    		})
    		if err != nil {
    			return err
    		}
    		_, err = tencentcloud.NewClsDataTransform(ctx, "example", &tencentcloud.ClsDataTransformArgs{
    			FuncType:   pulumi.Float64(1),
    			SrcTopicId: topicSrc.ClsTopicId,
    			Name:       pulumi.String("tf-example"),
    			EtlContent: pulumi.String("ext_sep(\"content\", \"f1, f2, f3\", sep=\",\", quote=\"\", restrict=False, mode=\"overwrite\")fields_drop(\"content\")"),
    			TaskType:   pulumi.Float64(3),
    			EnableFlag: pulumi.Float64(1),
    			DstResources: tencentcloud.ClsDataTransformDstResourceArray{
    				&tencentcloud.ClsDataTransformDstResourceArgs{
    					TopicId: topicDst.ClsTopicId,
    					Alias:   pulumi.String("iac-test-dst"),
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Tencentcloud = Pulumi.Tencentcloud;
    
    return await Deployment.RunAsync(() => 
    {
        var logsetSrc = new Tencentcloud.ClsLogset("logset_src", new()
        {
            LogsetName = "tf-example-src",
            Tags = 
            {
                { "createdBy", "terraform" },
            },
        });
    
        var topicSrc = new Tencentcloud.ClsTopic("topic_src", new()
        {
            TopicName = "tf-example_src",
            LogsetId = logsetSrc.ClsLogsetId,
            AutoSplit = false,
            MaxSplitPartitions = 20,
            PartitionCount = 1,
            Period = 10,
            StorageType = "hot",
            Tags = 
            {
                { "createdBy", "terraform" },
            },
        });
    
        var logsetDst = new Tencentcloud.ClsLogset("logset_dst", new()
        {
            LogsetName = "tf-example-dst",
            Tags = 
            {
                { "createdBy", "terraform" },
            },
        });
    
        var topicDst = new Tencentcloud.ClsTopic("topic_dst", new()
        {
            TopicName = "tf-example-dst",
            LogsetId = logsetDst.ClsLogsetId,
            AutoSplit = false,
            MaxSplitPartitions = 20,
            PartitionCount = 1,
            Period = 10,
            StorageType = "hot",
            Tags = 
            {
                { "createdBy", "terraform" },
            },
        });
    
        var example = new Tencentcloud.ClsDataTransform("example", new()
        {
            FuncType = 1,
            SrcTopicId = topicSrc.ClsTopicId,
            Name = "tf-example",
            EtlContent = "ext_sep(\"content\", \"f1, f2, f3\", sep=\",\", quote=\"\", restrict=False, mode=\"overwrite\")fields_drop(\"content\")",
            TaskType = 3,
            EnableFlag = 1,
            DstResources = new[]
            {
                new Tencentcloud.Inputs.ClsDataTransformDstResourceArgs
                {
                    TopicId = topicDst.ClsTopicId,
                    Alias = "iac-test-dst",
                },
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.tencentcloud.ClsLogset;
    import com.pulumi.tencentcloud.ClsLogsetArgs;
    import com.pulumi.tencentcloud.ClsTopic;
    import com.pulumi.tencentcloud.ClsTopicArgs;
    import com.pulumi.tencentcloud.ClsDataTransform;
    import com.pulumi.tencentcloud.ClsDataTransformArgs;
    import com.pulumi.tencentcloud.inputs.ClsDataTransformDstResourceArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var logsetSrc = new ClsLogset("logsetSrc", ClsLogsetArgs.builder()
                .logsetName("tf-example-src")
                .tags(Map.of("createdBy", "terraform"))
                .build());
    
            var topicSrc = new ClsTopic("topicSrc", ClsTopicArgs.builder()
                .topicName("tf-example_src")
                .logsetId(logsetSrc.clsLogsetId())
                .autoSplit(false)
                .maxSplitPartitions(20.0)
                .partitionCount(1.0)
                .period(10.0)
                .storageType("hot")
                .tags(Map.of("createdBy", "terraform"))
                .build());
    
            var logsetDst = new ClsLogset("logsetDst", ClsLogsetArgs.builder()
                .logsetName("tf-example-dst")
                .tags(Map.of("createdBy", "terraform"))
                .build());
    
            var topicDst = new ClsTopic("topicDst", ClsTopicArgs.builder()
                .topicName("tf-example-dst")
                .logsetId(logsetDst.clsLogsetId())
                .autoSplit(false)
                .maxSplitPartitions(20.0)
                .partitionCount(1.0)
                .period(10.0)
                .storageType("hot")
                .tags(Map.of("createdBy", "terraform"))
                .build());
    
            var example = new ClsDataTransform("example", ClsDataTransformArgs.builder()
                .funcType(1.0)
                .srcTopicId(topicSrc.clsTopicId())
                .name("tf-example")
                .etlContent("ext_sep(\"content\", \"f1, f2, f3\", sep=\",\", quote=\"\", restrict=False, mode=\"overwrite\")fields_drop(\"content\")")
                .taskType(3.0)
                .enableFlag(1.0)
                .dstResources(ClsDataTransformDstResourceArgs.builder()
                    .topicId(topicDst.clsTopicId())
                    .alias("iac-test-dst")
                    .build())
                .build());
    
        }
    }
    
    resources:
      logsetSrc:
        type: tencentcloud:ClsLogset
        name: logset_src
        properties:
          logsetName: tf-example-src
          tags:
            createdBy: terraform
      topicSrc:
        type: tencentcloud:ClsTopic
        name: topic_src
        properties:
          topicName: tf-example_src
          logsetId: ${logsetSrc.clsLogsetId}
          autoSplit: false
          maxSplitPartitions: 20
          partitionCount: 1
          period: 10
          storageType: hot
          tags:
            createdBy: terraform
      logsetDst:
        type: tencentcloud:ClsLogset
        name: logset_dst
        properties:
          logsetName: tf-example-dst
          tags:
            createdBy: terraform
      topicDst:
        type: tencentcloud:ClsTopic
        name: topic_dst
        properties:
          topicName: tf-example-dst
          logsetId: ${logsetDst.clsLogsetId}
          autoSplit: false
          maxSplitPartitions: 20
          partitionCount: 1
          period: 10
          storageType: hot
          tags:
            createdBy: terraform
      example:
        type: tencentcloud:ClsDataTransform
        properties:
          funcType: 1
          srcTopicId: ${topicSrc.clsTopicId}
          name: tf-example
          etlContent: ext_sep("content", "f1, f2, f3", sep=",", quote="", restrict=False, mode="overwrite")fields_drop("content")
          taskType: 3
          enableFlag: 1
          dstResources:
            - topicId: ${topicDst.clsTopicId}
              alias: iac-test-dst
    

    Create ClsDataTransform Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new ClsDataTransform(name: string, args: ClsDataTransformArgs, opts?: CustomResourceOptions);
    @overload
    def ClsDataTransform(resource_name: str,
                         args: ClsDataTransformArgs,
                         opts: Optional[ResourceOptions] = None)
    
    @overload
    def ClsDataTransform(resource_name: str,
                         opts: Optional[ResourceOptions] = None,
                         etl_content: Optional[str] = None,
                         task_type: Optional[float] = None,
                         src_topic_id: Optional[str] = None,
                         func_type: Optional[float] = None,
                         failure_log_key: Optional[str] = None,
                         enable_flag: Optional[float] = None,
                         env_infos: Optional[Sequence[ClsDataTransformEnvInfoArgs]] = None,
                         dst_resources: Optional[Sequence[ClsDataTransformDstResourceArgs]] = None,
                         backup_give_up_data: Optional[bool] = None,
                         data_transform_type: Optional[float] = None,
                         has_services_log: Optional[float] = None,
                         keep_failure_log: Optional[float] = None,
                         name: Optional[str] = None,
                         process_from_timestamp: Optional[float] = None,
                         process_to_timestamp: Optional[float] = None,
                         data_transform_sql_data_sources: Optional[Sequence[ClsDataTransformDataTransformSqlDataSourceArgs]] = None,
                         cls_data_transform_id: Optional[str] = None)
    func NewClsDataTransform(ctx *Context, name string, args ClsDataTransformArgs, opts ...ResourceOption) (*ClsDataTransform, error)
    public ClsDataTransform(string name, ClsDataTransformArgs args, CustomResourceOptions? opts = null)
    public ClsDataTransform(String name, ClsDataTransformArgs args)
    public ClsDataTransform(String name, ClsDataTransformArgs args, CustomResourceOptions options)
    
    type: tencentcloud:ClsDataTransform
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args ClsDataTransformArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args ClsDataTransformArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args ClsDataTransformArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args ClsDataTransformArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args ClsDataTransformArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    ClsDataTransform Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.

    The ClsDataTransform resource accepts the following input properties:

    EtlContent string
    Data transform content. If func_type is 2, must use log_auto_output.
    FuncType double
    Function type. 1: Specify the destination topic; 2: Dynamic creation.
    SrcTopicId string
    Source topic ID.
    TaskType double
    Task type. 1: Use random data from the source log topic for a processing preview; 2: Use user-defined test data for a processing preview; 3: Create a real processing task.
    BackupGiveUpData bool
    When func_type is 2, whether to discard data when the number of dynamically created logsets and topics exceeds the product specification limit. Default is false. false: Create backup logset and topic and write logs to the backup topic; true: Discard log data.
    ClsDataTransformId string
    ID of the resource.
    DataTransformSqlDataSources List<ClsDataTransformDataTransformSqlDataSource>
    Associated data source information.
    DataTransformType double
    Data transform type. 0: Standard data transform task; 1: Pre-processing data transform task (process collected logs before writing to the log topic).
    DstResources List<ClsDataTransformDstResource>
    Data transform destination resources. If func_type is 1, this parameter is required. If func_type is 2, this parameter does not need to be filled in.
    EnableFlag double
    Task enable flag. 1: enable, 2: disable, Default is 1.
    EnvInfos List<ClsDataTransformEnvInfo>
    Set environment variables.
    FailureLogKey string
    Field name for failure logs.
    HasServicesLog double
    Whether to enable service log delivery. 1: disable; 2: enable.
    KeepFailureLog double
    Keep failure log status. 1: do not keep (default); 2: keep.
    Name string
    Task name.
    ProcessFromTimestamp double
    Specify the start time of processing data, in seconds-level timestamp. Any time range within the log topic lifecycle. If it exceeds the lifecycle, only the part with data within the lifecycle is processed.
    ProcessToTimestamp double
    Specify the end time of processing data, in seconds-level timestamp. Cannot specify a future time. If not filled, it means continuous execution.
    EtlContent string
    Data transform content. If func_type is 2, must use log_auto_output.
    FuncType float64
    Function type. 1: Specify the destination topic; 2: Dynamic creation.
    SrcTopicId string
    Source topic ID.
    TaskType float64
    Task type. 1: Use random data from the source log topic for a processing preview; 2: Use user-defined test data for a processing preview; 3: Create a real processing task.
    BackupGiveUpData bool
    When func_type is 2, whether to discard data when the number of dynamically created logsets and topics exceeds the product specification limit. Default is false. false: Create backup logset and topic and write logs to the backup topic; true: Discard log data.
    ClsDataTransformId string
    ID of the resource.
    DataTransformSqlDataSources []ClsDataTransformDataTransformSqlDataSourceArgs
    Associated data source information.
    DataTransformType float64
    Data transform type. 0: Standard data transform task; 1: Pre-processing data transform task (process collected logs before writing to the log topic).
    DstResources []ClsDataTransformDstResourceArgs
    Data transform destination resources. If func_type is 1, this parameter is required. If func_type is 2, this parameter does not need to be filled in.
    EnableFlag float64
    Task enable flag. 1: enable, 2: disable, Default is 1.
    EnvInfos []ClsDataTransformEnvInfoArgs
    Set environment variables.
    FailureLogKey string
    Field name for failure logs.
    HasServicesLog float64
    Whether to enable service log delivery. 1: disable; 2: enable.
    KeepFailureLog float64
    Keep failure log status. 1: do not keep (default); 2: keep.
    Name string
    Task name.
    ProcessFromTimestamp float64
    Specify the start time of processing data, in seconds-level timestamp. Any time range within the log topic lifecycle. If it exceeds the lifecycle, only the part with data within the lifecycle is processed.
    ProcessToTimestamp float64
    Specify the end time of processing data, in seconds-level timestamp. Cannot specify a future time. If not filled, it means continuous execution.
    etlContent String
    Data transform content. If func_type is 2, must use log_auto_output.
    funcType Double
    Function type. 1: Specify the destination topic; 2: Dynamic creation.
    srcTopicId String
    Source topic ID.
    taskType Double
    Task type. 1: Use random data from the source log topic for a processing preview; 2: Use user-defined test data for a processing preview; 3: Create a real processing task.
    backupGiveUpData Boolean
    When func_type is 2, whether to discard data when the number of dynamically created logsets and topics exceeds the product specification limit. Default is false. false: Create backup logset and topic and write logs to the backup topic; true: Discard log data.
    clsDataTransformId String
    ID of the resource.
    dataTransformSqlDataSources List<ClsDataTransformDataTransformSqlDataSource>
    Associated data source information.
    dataTransformType Double
    Data transform type. 0: Standard data transform task; 1: Pre-processing data transform task (process collected logs before writing to the log topic).
    dstResources List<ClsDataTransformDstResource>
    Data transform destination resources. If func_type is 1, this parameter is required. If func_type is 2, this parameter does not need to be filled in.
    enableFlag Double
    Task enable flag. 1: enable, 2: disable, Default is 1.
    envInfos List<ClsDataTransformEnvInfo>
    Set environment variables.
    failureLogKey String
    Field name for failure logs.
    hasServicesLog Double
    Whether to enable service log delivery. 1: disable; 2: enable.
    keepFailureLog Double
    Keep failure log status. 1: do not keep (default); 2: keep.
    name String
    Task name.
    processFromTimestamp Double
    Specify the start time of processing data, in seconds-level timestamp. Any time range within the log topic lifecycle. If it exceeds the lifecycle, only the part with data within the lifecycle is processed.
    processToTimestamp Double
    Specify the end time of processing data, in seconds-level timestamp. Cannot specify a future time. If not filled, it means continuous execution.
    etlContent string
    Data transform content. If func_type is 2, must use log_auto_output.
    funcType number
    Function type. 1: Specify the destination topic; 2: Dynamic creation.
    srcTopicId string
    Source topic ID.
    taskType number
    Task type. 1: Use random data from the source log topic for a processing preview; 2: Use user-defined test data for a processing preview; 3: Create a real processing task.
    backupGiveUpData boolean
    When func_type is 2, whether to discard data when the number of dynamically created logsets and topics exceeds the product specification limit. Default is false. false: Create backup logset and topic and write logs to the backup topic; true: Discard log data.
    clsDataTransformId string
    ID of the resource.
    dataTransformSqlDataSources ClsDataTransformDataTransformSqlDataSource[]
    Associated data source information.
    dataTransformType number
    Data transform type. 0: Standard data transform task; 1: Pre-processing data transform task (process collected logs before writing to the log topic).
    dstResources ClsDataTransformDstResource[]
    Data transform destination resources. If func_type is 1, this parameter is required. If func_type is 2, this parameter does not need to be filled in.
    enableFlag number
    Task enable flag. 1: enable, 2: disable, Default is 1.
    envInfos ClsDataTransformEnvInfo[]
    Set environment variables.
    failureLogKey string
    Field name for failure logs.
    hasServicesLog number
    Whether to enable service log delivery. 1: disable; 2: enable.
    keepFailureLog number
    Keep failure log status. 1: do not keep (default); 2: keep.
    name string
    Task name.
    processFromTimestamp number
    Specify the start time of processing data, in seconds-level timestamp. Any time range within the log topic lifecycle. If it exceeds the lifecycle, only the part with data within the lifecycle is processed.
    processToTimestamp number
    Specify the end time of processing data, in seconds-level timestamp. Cannot specify a future time. If not filled, it means continuous execution.
    etl_content str
    Data transform content. If func_type is 2, must use log_auto_output.
    func_type float
    Function type. 1: Specify the destination topic; 2: Dynamic creation.
    src_topic_id str
    Source topic ID.
    task_type float
    Task type. 1: Use random data from the source log topic for a processing preview; 2: Use user-defined test data for a processing preview; 3: Create a real processing task.
    backup_give_up_data bool
    When func_type is 2, whether to discard data when the number of dynamically created logsets and topics exceeds the product specification limit. Default is false. false: Create backup logset and topic and write logs to the backup topic; true: Discard log data.
    cls_data_transform_id str
    ID of the resource.
    data_transform_sql_data_sources Sequence[ClsDataTransformDataTransformSqlDataSourceArgs]
    Associated data source information.
    data_transform_type float
    Data transform type. 0: Standard data transform task; 1: Pre-processing data transform task (process collected logs before writing to the log topic).
    dst_resources Sequence[ClsDataTransformDstResourceArgs]
    Data transform destination resources. If func_type is 1, this parameter is required. If func_type is 2, this parameter does not need to be filled in.
    enable_flag float
    Task enable flag. 1: enable, 2: disable, Default is 1.
    env_infos Sequence[ClsDataTransformEnvInfoArgs]
    Set environment variables.
    failure_log_key str
    Field name for failure logs.
    has_services_log float
    Whether to enable service log delivery. 1: disable; 2: enable.
    keep_failure_log float
    Keep failure log status. 1: do not keep (default); 2: keep.
    name str
    Task name.
    process_from_timestamp float
    Specify the start time of processing data, in seconds-level timestamp. Any time range within the log topic lifecycle. If it exceeds the lifecycle, only the part with data within the lifecycle is processed.
    process_to_timestamp float
    Specify the end time of processing data, in seconds-level timestamp. Cannot specify a future time. If not filled, it means continuous execution.
    etlContent String
    Data transform content. If func_type is 2, must use log_auto_output.
    funcType Number
    Function type. 1: Specify the destination topic; 2: Dynamic creation.
    srcTopicId String
    Source topic ID.
    taskType Number
    Task type. 1: Use random data from the source log topic for a processing preview; 2: Use user-defined test data for a processing preview; 3: Create a real processing task.
    backupGiveUpData Boolean
    When func_type is 2, whether to discard data when the number of dynamically created logsets and topics exceeds the product specification limit. Default is false. false: Create backup logset and topic and write logs to the backup topic; true: Discard log data.
    clsDataTransformId String
    ID of the resource.
    dataTransformSqlDataSources List<Property Map>
    Associated data source information.
    dataTransformType Number
    Data transform type. 0: Standard data transform task; 1: Pre-processing data transform task (process collected logs before writing to the log topic).
    dstResources List<Property Map>
    Data transform destination resources. If func_type is 1, this parameter is required. If func_type is 2, this parameter does not need to be filled in.
    enableFlag Number
    Task enable flag. 1: enable, 2: disable, Default is 1.
    envInfos List<Property Map>
    Set environment variables.
    failureLogKey String
    Field name for failure logs.
    hasServicesLog Number
    Whether to enable service log delivery. 1: disable; 2: enable.
    keepFailureLog Number
    Keep failure log status. 1: do not keep (default); 2: keep.
    name String
    Task name.
    processFromTimestamp Number
    Specify the start time of processing data, in seconds-level timestamp. Any time range within the log topic lifecycle. If it exceeds the lifecycle, only the part with data within the lifecycle is processed.
    processToTimestamp Number
    Specify the end time of processing data, in seconds-level timestamp. Cannot specify a future time. If not filled, it means continuous execution.

    Outputs

    All input properties are implicitly available as output properties. Additionally, the ClsDataTransform resource produces the following output properties:

    Id string
    The provider-assigned unique ID for this managed resource.
    Id string
    The provider-assigned unique ID for this managed resource.
    id String
    The provider-assigned unique ID for this managed resource.
    id string
    The provider-assigned unique ID for this managed resource.
    id str
    The provider-assigned unique ID for this managed resource.
    id String
    The provider-assigned unique ID for this managed resource.

    Look up Existing ClsDataTransform Resource

    Get an existing ClsDataTransform resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: ClsDataTransformState, opts?: CustomResourceOptions): ClsDataTransform
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            backup_give_up_data: Optional[bool] = None,
            cls_data_transform_id: Optional[str] = None,
            data_transform_sql_data_sources: Optional[Sequence[ClsDataTransformDataTransformSqlDataSourceArgs]] = None,
            data_transform_type: Optional[float] = None,
            dst_resources: Optional[Sequence[ClsDataTransformDstResourceArgs]] = None,
            enable_flag: Optional[float] = None,
            env_infos: Optional[Sequence[ClsDataTransformEnvInfoArgs]] = None,
            etl_content: Optional[str] = None,
            failure_log_key: Optional[str] = None,
            func_type: Optional[float] = None,
            has_services_log: Optional[float] = None,
            keep_failure_log: Optional[float] = None,
            name: Optional[str] = None,
            process_from_timestamp: Optional[float] = None,
            process_to_timestamp: Optional[float] = None,
            src_topic_id: Optional[str] = None,
            task_type: Optional[float] = None) -> ClsDataTransform
    func GetClsDataTransform(ctx *Context, name string, id IDInput, state *ClsDataTransformState, opts ...ResourceOption) (*ClsDataTransform, error)
    public static ClsDataTransform Get(string name, Input<string> id, ClsDataTransformState? state, CustomResourceOptions? opts = null)
    public static ClsDataTransform get(String name, Output<String> id, ClsDataTransformState state, CustomResourceOptions options)
    resources:  _:    type: tencentcloud:ClsDataTransform    get:      id: ${id}
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    BackupGiveUpData bool
    When func_type is 2, whether to discard data when the number of dynamically created logsets and topics exceeds the product specification limit. Default is false. false: Create backup logset and topic and write logs to the backup topic; true: Discard log data.
    ClsDataTransformId string
    ID of the resource.
    DataTransformSqlDataSources List<ClsDataTransformDataTransformSqlDataSource>
    Associated data source information.
    DataTransformType double
    Data transform type. 0: Standard data transform task; 1: Pre-processing data transform task (process collected logs before writing to the log topic).
    DstResources List<ClsDataTransformDstResource>
    Data transform destination resources. If func_type is 1, this parameter is required. If func_type is 2, this parameter does not need to be filled in.
    EnableFlag double
    Task enable flag. 1: enable, 2: disable, Default is 1.
    EnvInfos List<ClsDataTransformEnvInfo>
    Set environment variables.
    EtlContent string
    Data transform content. If func_type is 2, must use log_auto_output.
    FailureLogKey string
    Field name for failure logs.
    FuncType double
    Task type. 1: Specify the topic; 2: Dynamic creation.
    HasServicesLog double
    Whether to enable service log delivery. 1: disable; 2: enable.
    KeepFailureLog double
    Keep failure log status. 1: do not keep (default); 2: keep.
    Name string
    Task name.
    ProcessFromTimestamp double
    Specify the start time of processing data, in seconds-level timestamp. Any time range within the log topic lifecycle. If it exceeds the lifecycle, only the part with data within the lifecycle is processed.
    ProcessToTimestamp double
    Specify the end time of processing data, in seconds-level timestamp. Cannot specify a future time. If not filled, it means continuous execution.
    SrcTopicId string
    Source topic ID.
    TaskType double
    Task type. 1: Use random data from the source log topic for processing preview; 2: Use user-defined test data for processing preview; 3: Create a real processing task.
    BackupGiveUpData bool
    When func_type is 2, whether to discard data when the number of dynamically created logsets and topics exceeds the product specification limit. Default is false. false: Create backup logset and topic and write logs to the backup topic; true: Discard log data.
    ClsDataTransformId string
    ID of the resource.
    DataTransformSqlDataSources []ClsDataTransformDataTransformSqlDataSourceArgs
    Associated data source information.
    DataTransformType float64
    Data transform type. 0: Standard data transform task; 1: Pre-processing data transform task (process collected logs before writing to the log topic).
    DstResources []ClsDataTransformDstResourceArgs
    Data transform destination resources. If func_type is 1, this parameter is required. If func_type is 2, this parameter does not need to be filled in.
    EnableFlag float64
    Task enable flag. 1: enable, 2: disable, Default is 1.
    EnvInfos []ClsDataTransformEnvInfoArgs
    Set environment variables.
    EtlContent string
    Data transform content. If func_type is 2, must use log_auto_output.
    FailureLogKey string
    Field name for failure logs.
    FuncType float64
    Task type. 1: Specify the topic; 2: Dynamic creation.
    HasServicesLog float64
    Whether to enable service log delivery. 1: disable; 2: enable.
    KeepFailureLog float64
    Keep failure log status. 1: do not keep (default); 2: keep.
    Name string
    Task name.
    ProcessFromTimestamp float64
    Specify the start time of processing data, in seconds-level timestamp. Any time range within the log topic lifecycle. If it exceeds the lifecycle, only the part with data within the lifecycle is processed.
    ProcessToTimestamp float64
    Specify the end time of processing data, in seconds-level timestamp. Cannot specify a future time. If not filled, it means continuous execution.
    SrcTopicId string
    Source topic ID.
    TaskType float64
    Task type. 1: Use random data from the source log topic for processing preview; 2: Use user-defined test data for processing preview; 3: Create a real processing task.
    backupGiveUpData Boolean
    When func_type is 2, whether to discard data when the number of dynamically created logsets and topics exceeds the product specification limit. Default is false. false: Create backup logset and topic and write logs to the backup topic; true: Discard log data.
    clsDataTransformId String
    ID of the resource.
    dataTransformSqlDataSources List<ClsDataTransformDataTransformSqlDataSource>
    Associated data source information.
    dataTransformType Double
    Data transform type. 0: Standard data transform task; 1: Pre-processing data transform task (process collected logs before writing to the log topic).
    dstResources List<ClsDataTransformDstResource>
    Data transform destination resources. If func_type is 1, this parameter is required. If func_type is 2, this parameter does not need to be filled in.
    enableFlag Double
    Task enable flag. 1: enable, 2: disable, Default is 1.
    envInfos List<ClsDataTransformEnvInfo>
    Set environment variables.
    etlContent String
    Data transform content. If func_type is 2, must use log_auto_output.
    failureLogKey String
    Field name for failure logs.
    funcType Double
    Task type. 1: Specify the topic; 2: Dynamic creation.
    hasServicesLog Double
    Whether to enable service log delivery. 1: disable; 2: enable.
    keepFailureLog Double
    Keep failure log status. 1: do not keep (default); 2: keep.
    name String
    Task name.
    processFromTimestamp Double
    Specify the start time of processing data, in seconds-level timestamp. Any time range within the log topic lifecycle. If it exceeds the lifecycle, only the part with data within the lifecycle is processed.
    processToTimestamp Double
    Specify the end time of processing data, in seconds-level timestamp. Cannot specify a future time. If not filled, it means continuous execution.
    srcTopicId String
    Source topic ID.
    taskType Double
    Task type. 1: Use random data from the source log topic for processing preview; 2: Use user-defined test data for processing preview; 3: Create a real processing task.
    backupGiveUpData boolean
    When func_type is 2, whether to discard data when the number of dynamically created logsets and topics exceeds the product specification limit. Default is false. false: Create backup logset and topic and write logs to the backup topic; true: Discard log data.
    clsDataTransformId string
    ID of the resource.
    dataTransformSqlDataSources ClsDataTransformDataTransformSqlDataSource[]
    Associated data source information.
    dataTransformType number
    Data transform type. 0: Standard data transform task; 1: Pre-processing data transform task (process collected logs before writing to the log topic).
    dstResources ClsDataTransformDstResource[]
    Data transform destination resources. If func_type is 1, this parameter is required. If func_type is 2, this parameter does not need to be filled in.
    enableFlag number
    Task enable flag. 1: enable, 2: disable, Default is 1.
    envInfos ClsDataTransformEnvInfo[]
    Set environment variables.
    etlContent string
    Data transform content. If func_type is 2, must use log_auto_output.
    failureLogKey string
    Field name for failure logs.
    funcType number
    Task type. 1: Specify the topic; 2: Dynamic creation.
    hasServicesLog number
    Whether to enable service log delivery. 1: disable; 2: enable.
    keepFailureLog number
    Keep failure log status. 1: do not keep (default); 2: keep.
    name string
    Task name.
    processFromTimestamp number
    Specify the start time of processing data, in seconds-level timestamp. Any time range within the log topic lifecycle. If it exceeds the lifecycle, only the part with data within the lifecycle is processed.
    processToTimestamp number
    Specify the end time of processing data, in seconds-level timestamp. Cannot specify a future time. If not filled, it means continuous execution.
    srcTopicId string
    Source topic ID.
    taskType number
    Task type. 1: Use random data from the source log topic for processing preview; 2: Use user-defined test data for processing preview; 3: Create a real processing task.
    backup_give_up_data bool
    When func_type is 2, whether to discard data when the number of dynamically created logsets and topics exceeds the product specification limit. Default is false. false: Create backup logset and topic and write logs to the backup topic; true: Discard log data.
    cls_data_transform_id str
    ID of the resource.
    data_transform_sql_data_sources Sequence[ClsDataTransformDataTransformSqlDataSourceArgs]
    Associated data source information.
    data_transform_type float
    Data transform type. 0: Standard data transform task; 1: Pre-processing data transform task (process collected logs before writing to the log topic).
    dst_resources Sequence[ClsDataTransformDstResourceArgs]
    Data transform destination resources. If func_type is 1, this parameter is required. If func_type is 2, this parameter does not need to be filled in.
    enable_flag float
    Task enable flag. 1: enable, 2: disable, Default is 1.
    env_infos Sequence[ClsDataTransformEnvInfoArgs]
    Set environment variables.
    etl_content str
    Data transform content. If func_type is 2, must use log_auto_output.
    failure_log_key str
    Field name for failure logs.
    func_type float
    Task type. 1: Specify the topic; 2: Dynamic creation.
    has_services_log float
    Whether to enable service log delivery. 1: disable; 2: enable.
    keep_failure_log float
    Keep failure log status. 1: do not keep (default); 2: keep.
    name str
    Task name.
    process_from_timestamp float
    Specify the start time of processing data, in seconds-level timestamp. Any time range within the log topic lifecycle. If it exceeds the lifecycle, only the part with data within the lifecycle is processed.
    process_to_timestamp float
    Specify the end time of processing data, in seconds-level timestamp. Cannot specify a future time. If not filled, it means continuous execution.
    src_topic_id str
    Source topic ID.
    task_type float
    Task type. 1: Use random data from the source log topic for processing preview; 2: Use user-defined test data for processing preview; 3: Create a real processing task.
    backupGiveUpData Boolean
    When func_type is 2, whether to discard data when the number of dynamically created logsets and topics exceeds the product specification limit. Default is false. false: Create backup logset and topic and write logs to the backup topic; true: Discard log data.
    clsDataTransformId String
    ID of the resource.
    dataTransformSqlDataSources List<Property Map>
    Associated data source information.
    dataTransformType Number
    Data transform type. 0: Standard data transform task; 1: Pre-processing data transform task (process collected logs before writing to the log topic).
    dstResources List<Property Map>
    Data transform destination resources. If func_type is 1, this parameter is required. If func_type is 2, this parameter does not need to be filled in.
    enableFlag Number
    Task enable flag. 1: enable, 2: disable, Default is 1.
    envInfos List<Property Map>
    Set environment variables.
    etlContent String
    Data transform content. If func_type is 2, must use log_auto_output.
    failureLogKey String
    Field name for failure logs.
    funcType Number
    Task type. 1: Specify the topic; 2: Dynamic creation.
    hasServicesLog Number
    Whether to enable service log delivery. 1: disable; 2: enable.
    keepFailureLog Number
    Keep failure log status. 1: do not keep (default); 2: keep.
    name String
    Task name.
    processFromTimestamp Number
    Specify the start time of processing data, in seconds-level timestamp. Any time range within the log topic lifecycle. If it exceeds the lifecycle, only the part with data within the lifecycle is processed.
    processToTimestamp Number
    Specify the end time of processing data, in seconds-level timestamp. Cannot specify a future time. If not filled, it means continuous execution.
    srcTopicId String
    Source topic ID.
    taskType Number
    Task type. 1: Use random data from the source log topic for processing preview; 2: Use user-defined test data for processing preview; 3: Create a real processing task.

    Supporting Types

    ClsDataTransformDataTransformSqlDataSource, ClsDataTransformDataTransformSqlDataSourceArgs

    AliasName string
    Alias. Used in data transform statements.
    DataSource double
    Data source type. 1: MySQL; 2: Self-built MySQL; 3: PostgreSQL.
    InstanceId string
    Instance ID. When DataSource is 1, it represents the cloud database MySQL instance ID, such as: cdb-zxcvbnm.
    Password string
    MySQL access password.
    Region string
    InstanceId region. For example: ap-guangzhou.
    User string
    MySQL access username.
    AliasName string
    Alias. Used in data transform statements.
    DataSource float64
    Data source type. 1: MySQL; 2: Self-built MySQL; 3: PostgreSQL.
    InstanceId string
    Instance ID. When DataSource is 1, it represents the cloud database MySQL instance ID, such as: cdb-zxcvbnm.
    Password string
    MySQL access password.
    Region string
    InstanceId region. For example: ap-guangzhou.
    User string
    MySQL access username.
    aliasName String
    Alias. Used in data transform statements.
    dataSource Double
    Data source type. 1: MySQL; 2: Self-built MySQL; 3: PostgreSQL.
    instanceId String
    Instance ID. When DataSource is 1, it represents the cloud database MySQL instance ID, such as: cdb-zxcvbnm.
    password String
    MySQL access password.
    region String
    InstanceId region. For example: ap-guangzhou.
    user String
    MySQL access username.
    aliasName string
    Alias. Used in data transform statements.
    dataSource number
    Data source type. 1: MySQL; 2: Self-built MySQL; 3: PostgreSQL.
    instanceId string
    Instance ID. When DataSource is 1, it represents the cloud database MySQL instance ID, such as: cdb-zxcvbnm.
    password string
    MySQL access password.
    region string
    InstanceId region. For example: ap-guangzhou.
    user string
    MySQL access username.
    alias_name str
    Alias. Used in data transform statements.
    data_source float
    Data source type. 1: MySQL; 2: Self-built MySQL; 3: PostgreSQL.
    instance_id str
    Instance ID. When DataSource is 1, it represents the cloud database MySQL instance ID, such as: cdb-zxcvbnm.
    password str
    MySQL access password.
    region str
    InstanceId region. For example: ap-guangzhou.
    user str
    MySQL access username.
    aliasName String
    Alias. Used in data transform statements.
    dataSource Number
    Data source type. 1: MySQL; 2: Self-built MySQL; 3: PostgreSQL.
    instanceId String
    Instance ID. When DataSource is 1, it represents the cloud database MySQL instance ID, such as: cdb-zxcvbnm.
    password String
    MySQL access password.
    region String
    InstanceId region. For example: ap-guangzhou.
    user String
    MySQL access username.

    ClsDataTransformDstResource, ClsDataTransformDstResourceArgs

    Alias string
    Alias.
    TopicId string
    Dst topic ID.
    Alias string
    Alias.
    TopicId string
    Dst topic ID.
    alias String
    Alias.
    topicId String
    Dst topic ID.
    alias string
    Alias.
    topicId string
    Dst topic ID.
    alias str
    Alias.
    topic_id str
    Dst topic ID.
    alias String
    Alias.
    topicId String
    Dst topic ID.

    ClsDataTransformEnvInfo, ClsDataTransformEnvInfoArgs

    Key string
    Environment variable name.
    Value string
    Environment variable value.
    Key string
    Environment variable name.
    Value string
    Environment variable value.
    key String
    Environment variable name.
    value String
    Environment variable value.
    key string
    Environment variable name.
    value string
    Environment variable value.
    key str
    Environment variable name.
    value str
    Environment variable value.
    key String
    Environment variable name.
    value String
    Environment variable value.

    Import

    CLS data transform can be imported using the id, e.g.

    $ pulumi import tencentcloud:index/clsDataTransform:ClsDataTransform example 7b4bcb05-9154-4cdc-a479-f6b5743846e5
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    tencentcloud tencentcloudstack/terraform-provider-tencentcloud
    License
    Notes
    This Pulumi package is based on the tencentcloud Terraform Provider.
    tencentcloud logo
    Viewing docs for tencentcloud 1.82.73
    published on Friday, Mar 6, 2026 by tencentcloudstack
      Try Pulumi Cloud free. Your team will thank you.