1. Packages
  2. Alibaba Cloud
  3. API Docs
  4. log
  5. Etl
Alibaba Cloud v3.57.0 published on Saturday, Jun 15, 2024 by Pulumi

alicloud.log.Etl

Explore with Pulumi AI

alicloud logo
Alibaba Cloud v3.57.0 published on Saturday, Jun 15, 2024 by Pulumi

    The data transformation of the log service is a hosted, highly available, and scalable data processing service, which is widely applicable to scenarios such as data regularization, enrichment, distribution, aggregation, and index reconstruction. Refer to details.

    NOTE: Available in 1.120.0

    Example Usage

    Basic Usage

    import * as pulumi from "@pulumi/pulumi";
    import * as alicloud from "@pulumi/alicloud";
    import * as random from "@pulumi/random";
    
    const _default = new random.index.Integer("default", {
        max: 99999,
        min: 10000,
    });
    const example = new alicloud.log.Project("example", {
        name: `terraform-example-${_default.result}`,
        description: "terraform-example",
    });
    const exampleStore = new alicloud.log.Store("example", {
        project: example.name,
        name: "example-store",
        retentionPeriod: 3650,
        shardCount: 3,
        autoSplit: true,
        maxSplitShardCount: 60,
        appendMeta: true,
    });
    const example2 = new alicloud.log.Store("example2", {
        project: example.name,
        name: "example-store2",
        retentionPeriod: 3650,
        shardCount: 3,
        autoSplit: true,
        maxSplitShardCount: 60,
        appendMeta: true,
    });
    const example3 = new alicloud.log.Store("example3", {
        project: example.name,
        name: "example-store3",
        retentionPeriod: 3650,
        shardCount: 3,
        autoSplit: true,
        maxSplitShardCount: 60,
        appendMeta: true,
    });
    const exampleEtl = new alicloud.log.Etl("example", {
        etlName: "terraform-example",
        project: example.name,
        displayName: "terraform-example",
        description: "terraform-example",
        accessKeyId: "access_key_id",
        accessKeySecret: "access_key_secret",
        script: "e_set('new','key')",
        logstore: exampleStore.name,
        etlSinks: [
            {
                name: "target_name",
                accessKeyId: "example2_access_key_id",
                accessKeySecret: "example2_access_key_secret",
                endpoint: "cn-hangzhou.log.aliyuncs.com",
                project: example.name,
                logstore: example2.name,
            },
            {
                name: "target_name2",
                accessKeyId: "example3_access_key_id",
                accessKeySecret: "example3_access_key_secret",
                endpoint: "cn-hangzhou.log.aliyuncs.com",
                project: example.name,
                logstore: example3.name,
            },
        ],
    });
    
    import pulumi
    import pulumi_alicloud as alicloud
    import pulumi_random as random
    
    default = random.index.Integer("default",
        max=99999,
        min=10000)
    example = alicloud.log.Project("example",
        name=f"terraform-example-{default['result']}",
        description="terraform-example")
    example_store = alicloud.log.Store("example",
        project=example.name,
        name="example-store",
        retention_period=3650,
        shard_count=3,
        auto_split=True,
        max_split_shard_count=60,
        append_meta=True)
    example2 = alicloud.log.Store("example2",
        project=example.name,
        name="example-store2",
        retention_period=3650,
        shard_count=3,
        auto_split=True,
        max_split_shard_count=60,
        append_meta=True)
    example3 = alicloud.log.Store("example3",
        project=example.name,
        name="example-store3",
        retention_period=3650,
        shard_count=3,
        auto_split=True,
        max_split_shard_count=60,
        append_meta=True)
    example_etl = alicloud.log.Etl("example",
        etl_name="terraform-example",
        project=example.name,
        display_name="terraform-example",
        description="terraform-example",
        access_key_id="access_key_id",
        access_key_secret="access_key_secret",
        script="e_set('new','key')",
        logstore=example_store.name,
        etl_sinks=[
            alicloud.log.EtlEtlSinkArgs(
                name="target_name",
                access_key_id="example2_access_key_id",
                access_key_secret="example2_access_key_secret",
                endpoint="cn-hangzhou.log.aliyuncs.com",
                project=example.name,
                logstore=example2.name,
            ),
            alicloud.log.EtlEtlSinkArgs(
                name="target_name2",
                access_key_id="example3_access_key_id",
                access_key_secret="example3_access_key_secret",
                endpoint="cn-hangzhou.log.aliyuncs.com",
                project=example.name,
                logstore=example3.name,
            ),
        ])
    
    package main
    
    import (
    	"fmt"
    
    	"github.com/pulumi/pulumi-alicloud/sdk/v3/go/alicloud/log"
    	"github.com/pulumi/pulumi-random/sdk/v4/go/random"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_default, err := random.NewInteger(ctx, "default", &random.IntegerArgs{
    			Max: 99999,
    			Min: 10000,
    		})
    		if err != nil {
    			return err
    		}
    		example, err := log.NewProject(ctx, "example", &log.ProjectArgs{
    			Name:        pulumi.String(fmt.Sprintf("terraform-example-%v", _default.Result)),
    			Description: pulumi.String("terraform-example"),
    		})
    		if err != nil {
    			return err
    		}
    		exampleStore, err := log.NewStore(ctx, "example", &log.StoreArgs{
    			Project:            example.Name,
    			Name:               pulumi.String("example-store"),
    			RetentionPeriod:    pulumi.Int(3650),
    			ShardCount:         pulumi.Int(3),
    			AutoSplit:          pulumi.Bool(true),
    			MaxSplitShardCount: pulumi.Int(60),
    			AppendMeta:         pulumi.Bool(true),
    		})
    		if err != nil {
    			return err
    		}
    		example2, err := log.NewStore(ctx, "example2", &log.StoreArgs{
    			Project:            example.Name,
    			Name:               pulumi.String("example-store2"),
    			RetentionPeriod:    pulumi.Int(3650),
    			ShardCount:         pulumi.Int(3),
    			AutoSplit:          pulumi.Bool(true),
    			MaxSplitShardCount: pulumi.Int(60),
    			AppendMeta:         pulumi.Bool(true),
    		})
    		if err != nil {
    			return err
    		}
    		example3, err := log.NewStore(ctx, "example3", &log.StoreArgs{
    			Project:            example.Name,
    			Name:               pulumi.String("example-store3"),
    			RetentionPeriod:    pulumi.Int(3650),
    			ShardCount:         pulumi.Int(3),
    			AutoSplit:          pulumi.Bool(true),
    			MaxSplitShardCount: pulumi.Int(60),
    			AppendMeta:         pulumi.Bool(true),
    		})
    		if err != nil {
    			return err
    		}
    		_, err = log.NewEtl(ctx, "example", &log.EtlArgs{
    			EtlName:         pulumi.String("terraform-example"),
    			Project:         example.Name,
    			DisplayName:     pulumi.String("terraform-example"),
    			Description:     pulumi.String("terraform-example"),
    			AccessKeyId:     pulumi.String("access_key_id"),
    			AccessKeySecret: pulumi.String("access_key_secret"),
    			Script:          pulumi.String("e_set('new','key')"),
    			Logstore:        exampleStore.Name,
    			EtlSinks: log.EtlEtlSinkArray{
    				&log.EtlEtlSinkArgs{
    					Name:            pulumi.String("target_name"),
    					AccessKeyId:     pulumi.String("example2_access_key_id"),
    					AccessKeySecret: pulumi.String("example2_access_key_secret"),
    					Endpoint:        pulumi.String("cn-hangzhou.log.aliyuncs.com"),
    					Project:         example.Name,
    					Logstore:        example2.Name,
    				},
    				&log.EtlEtlSinkArgs{
    					Name:            pulumi.String("target_name2"),
    					AccessKeyId:     pulumi.String("example3_access_key_id"),
    					AccessKeySecret: pulumi.String("example3_access_key_secret"),
    					Endpoint:        pulumi.String("cn-hangzhou.log.aliyuncs.com"),
    					Project:         example.Name,
    					Logstore:        example3.Name,
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using AliCloud = Pulumi.AliCloud;
    using Random = Pulumi.Random;
    
    return await Deployment.RunAsync(() => 
    {
        var @default = new Random.Index.Integer("default", new()
        {
            Max = 99999,
            Min = 10000,
        });
    
        var example = new AliCloud.Log.Project("example", new()
        {
            Name = $"terraform-example-{@default.Result}",
            Description = "terraform-example",
        });
    
        var exampleStore = new AliCloud.Log.Store("example", new()
        {
            Project = example.Name,
            Name = "example-store",
            RetentionPeriod = 3650,
            ShardCount = 3,
            AutoSplit = true,
            MaxSplitShardCount = 60,
            AppendMeta = true,
        });
    
        var example2 = new AliCloud.Log.Store("example2", new()
        {
            Project = example.Name,
            Name = "example-store2",
            RetentionPeriod = 3650,
            ShardCount = 3,
            AutoSplit = true,
            MaxSplitShardCount = 60,
            AppendMeta = true,
        });
    
        var example3 = new AliCloud.Log.Store("example3", new()
        {
            Project = example.Name,
            Name = "example-store3",
            RetentionPeriod = 3650,
            ShardCount = 3,
            AutoSplit = true,
            MaxSplitShardCount = 60,
            AppendMeta = true,
        });
    
        var exampleEtl = new AliCloud.Log.Etl("example", new()
        {
            EtlName = "terraform-example",
            Project = example.Name,
            DisplayName = "terraform-example",
            Description = "terraform-example",
            AccessKeyId = "access_key_id",
            AccessKeySecret = "access_key_secret",
            Script = "e_set('new','key')",
            Logstore = exampleStore.Name,
            EtlSinks = new[]
            {
                new AliCloud.Log.Inputs.EtlEtlSinkArgs
                {
                    Name = "target_name",
                    AccessKeyId = "example2_access_key_id",
                    AccessKeySecret = "example2_access_key_secret",
                    Endpoint = "cn-hangzhou.log.aliyuncs.com",
                    Project = example.Name,
                    Logstore = example2.Name,
                },
                new AliCloud.Log.Inputs.EtlEtlSinkArgs
                {
                    Name = "target_name2",
                    AccessKeyId = "example3_access_key_id",
                    AccessKeySecret = "example3_access_key_secret",
                    Endpoint = "cn-hangzhou.log.aliyuncs.com",
                    Project = example.Name,
                    Logstore = example3.Name,
                },
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.random.Integer;
    import com.pulumi.random.IntegerArgs;
    import com.pulumi.alicloud.log.Project;
    import com.pulumi.alicloud.log.ProjectArgs;
    import com.pulumi.alicloud.log.Store;
    import com.pulumi.alicloud.log.StoreArgs;
    import com.pulumi.alicloud.log.Etl;
    import com.pulumi.alicloud.log.EtlArgs;
    import com.pulumi.alicloud.log.inputs.EtlEtlSinkArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var default_ = new Integer("default", IntegerArgs.builder()
                .max(99999)
                .min(10000)
                .build());
    
            var example = new Project("example", ProjectArgs.builder()
                .name(String.format("terraform-example-%s", default_.result()))
                .description("terraform-example")
                .build());
    
            var exampleStore = new Store("exampleStore", StoreArgs.builder()
                .project(example.name())
                .name("example-store")
                .retentionPeriod(3650)
                .shardCount(3)
                .autoSplit(true)
                .maxSplitShardCount(60)
                .appendMeta(true)
                .build());
    
            var example2 = new Store("example2", StoreArgs.builder()
                .project(example.name())
                .name("example-store2")
                .retentionPeriod(3650)
                .shardCount(3)
                .autoSplit(true)
                .maxSplitShardCount(60)
                .appendMeta(true)
                .build());
    
            var example3 = new Store("example3", StoreArgs.builder()
                .project(example.name())
                .name("example-store3")
                .retentionPeriod(3650)
                .shardCount(3)
                .autoSplit(true)
                .maxSplitShardCount(60)
                .appendMeta(true)
                .build());
    
            var exampleEtl = new Etl("exampleEtl", EtlArgs.builder()
                .etlName("terraform-example")
                .project(example.name())
                .displayName("terraform-example")
                .description("terraform-example")
                .accessKeyId("access_key_id")
                .accessKeySecret("access_key_secret")
                .script("e_set('new','key')")
                .logstore(exampleStore.name())
                .etlSinks(            
                    EtlEtlSinkArgs.builder()
                        .name("target_name")
                        .accessKeyId("example2_access_key_id")
                        .accessKeySecret("example2_access_key_secret")
                        .endpoint("cn-hangzhou.log.aliyuncs.com")
                        .project(example.name())
                        .logstore(example2.name())
                        .build(),
                    EtlEtlSinkArgs.builder()
                        .name("target_name2")
                        .accessKeyId("example3_access_key_id")
                        .accessKeySecret("example3_access_key_secret")
                        .endpoint("cn-hangzhou.log.aliyuncs.com")
                        .project(example.name())
                        .logstore(example3.name())
                        .build())
                .build());
    
        }
    }
    
    resources:
      default:
        type: random:integer
        properties:
          max: 99999
          min: 10000
      example:
        type: alicloud:log:Project
        properties:
          name: terraform-example-${default.result}
          description: terraform-example
      exampleStore:
        type: alicloud:log:Store
        name: example
        properties:
          project: ${example.name}
          name: example-store
          retentionPeriod: 3650
          shardCount: 3
          autoSplit: true
          maxSplitShardCount: 60
          appendMeta: true
      example2:
        type: alicloud:log:Store
        properties:
          project: ${example.name}
          name: example-store2
          retentionPeriod: 3650
          shardCount: 3
          autoSplit: true
          maxSplitShardCount: 60
          appendMeta: true
      example3:
        type: alicloud:log:Store
        properties:
          project: ${example.name}
          name: example-store3
          retentionPeriod: 3650
          shardCount: 3
          autoSplit: true
          maxSplitShardCount: 60
          appendMeta: true
      exampleEtl:
        type: alicloud:log:Etl
        name: example
        properties:
          etlName: terraform-example
          project: ${example.name}
          displayName: terraform-example
          description: terraform-example
          accessKeyId: access_key_id
          accessKeySecret: access_key_secret
          script: e_set('new','key')
          logstore: ${exampleStore.name}
          etlSinks:
            - name: target_name
              accessKeyId: example2_access_key_id
              accessKeySecret: example2_access_key_secret
              endpoint: cn-hangzhou.log.aliyuncs.com
              project: ${example.name}
              logstore: ${example2.name}
            - name: target_name2
              accessKeyId: example3_access_key_id
              accessKeySecret: example3_access_key_secret
              endpoint: cn-hangzhou.log.aliyuncs.com
              project: ${example.name}
              logstore: ${example3.name}
    

    Create Etl Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new Etl(name: string, args: EtlArgs, opts?: CustomResourceOptions);
    @overload
    def Etl(resource_name: str,
            args: EtlArgs,
            opts: Optional[ResourceOptions] = None)
    
    @overload
    def Etl(resource_name: str,
            opts: Optional[ResourceOptions] = None,
            etl_sinks: Optional[Sequence[EtlEtlSinkArgs]] = None,
            script: Optional[str] = None,
            project: Optional[str] = None,
            logstore: Optional[str] = None,
            display_name: Optional[str] = None,
            etl_name: Optional[str] = None,
            kms_encrypted_access_key_secret: Optional[str] = None,
            parameters: Optional[Mapping[str, str]] = None,
            from_time: Optional[int] = None,
            kms_encrypted_access_key_id: Optional[str] = None,
            access_key_id: Optional[str] = None,
            kms_encryption_access_key_id_context: Optional[Mapping[str, Any]] = None,
            kms_encryption_access_key_secret_context: Optional[Mapping[str, Any]] = None,
            last_modified_time: Optional[int] = None,
            description: Optional[str] = None,
            etl_type: Optional[str] = None,
            create_time: Optional[int] = None,
            role_arn: Optional[str] = None,
            schedule: Optional[str] = None,
            access_key_secret: Optional[str] = None,
            status: Optional[str] = None,
            to_time: Optional[int] = None,
            version: Optional[int] = None)
    func NewEtl(ctx *Context, name string, args EtlArgs, opts ...ResourceOption) (*Etl, error)
    public Etl(string name, EtlArgs args, CustomResourceOptions? opts = null)
    public Etl(String name, EtlArgs args)
    public Etl(String name, EtlArgs args, CustomResourceOptions options)
    
    type: alicloud:log:Etl
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args EtlArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args EtlArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args EtlArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args EtlArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args EtlArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Example

    The following reference example uses placeholder values for all input properties.

    var etlResource = new AliCloud.Log.Etl("etlResource", new()
    {
        EtlSinks = new[]
        {
            new AliCloud.Log.Inputs.EtlEtlSinkArgs
            {
                Endpoint = "string",
                Logstore = "string",
                Name = "string",
                Project = "string",
                AccessKeyId = "string",
                AccessKeySecret = "string",
                KmsEncryptedAccessKeyId = "string",
                KmsEncryptedAccessKeySecret = "string",
                RoleArn = "string",
                Type = "string",
            },
        },
        Script = "string",
        Project = "string",
        Logstore = "string",
        DisplayName = "string",
        EtlName = "string",
        KmsEncryptedAccessKeySecret = "string",
        Parameters = 
        {
            { "string", "string" },
        },
        FromTime = 0,
        KmsEncryptedAccessKeyId = "string",
        AccessKeyId = "string",
        KmsEncryptionAccessKeyIdContext = 
        {
            { "string", "any" },
        },
        KmsEncryptionAccessKeySecretContext = 
        {
            { "string", "any" },
        },
        LastModifiedTime = 0,
        Description = "string",
        EtlType = "string",
        CreateTime = 0,
        RoleArn = "string",
        Schedule = "string",
        AccessKeySecret = "string",
        Status = "string",
        ToTime = 0,
        Version = 0,
    });
    
    example, err := log.NewEtl(ctx, "etlResource", &log.EtlArgs{
    	EtlSinks: log.EtlEtlSinkArray{
    		&log.EtlEtlSinkArgs{
    			Endpoint:                    pulumi.String("string"),
    			Logstore:                    pulumi.String("string"),
    			Name:                        pulumi.String("string"),
    			Project:                     pulumi.String("string"),
    			AccessKeyId:                 pulumi.String("string"),
    			AccessKeySecret:             pulumi.String("string"),
    			KmsEncryptedAccessKeyId:     pulumi.String("string"),
    			KmsEncryptedAccessKeySecret: pulumi.String("string"),
    			RoleArn:                     pulumi.String("string"),
    			Type:                        pulumi.String("string"),
    		},
    	},
    	Script:                      pulumi.String("string"),
    	Project:                     pulumi.String("string"),
    	Logstore:                    pulumi.String("string"),
    	DisplayName:                 pulumi.String("string"),
    	EtlName:                     pulumi.String("string"),
    	KmsEncryptedAccessKeySecret: pulumi.String("string"),
    	Parameters: pulumi.StringMap{
    		"string": pulumi.String("string"),
    	},
    	FromTime:                pulumi.Int(0),
    	KmsEncryptedAccessKeyId: pulumi.String("string"),
    	AccessKeyId:             pulumi.String("string"),
    	KmsEncryptionAccessKeyIdContext: pulumi.Map{
    		"string": pulumi.Any("any"),
    	},
    	KmsEncryptionAccessKeySecretContext: pulumi.Map{
    		"string": pulumi.Any("any"),
    	},
    	LastModifiedTime: pulumi.Int(0),
    	Description:      pulumi.String("string"),
    	EtlType:          pulumi.String("string"),
    	CreateTime:       pulumi.Int(0),
    	RoleArn:          pulumi.String("string"),
    	Schedule:         pulumi.String("string"),
    	AccessKeySecret:  pulumi.String("string"),
    	Status:           pulumi.String("string"),
    	ToTime:           pulumi.Int(0),
    	Version:          pulumi.Int(0),
    })
    
    var etlResource = new Etl("etlResource", EtlArgs.builder()
        .etlSinks(EtlEtlSinkArgs.builder()
            .endpoint("string")
            .logstore("string")
            .name("string")
            .project("string")
            .accessKeyId("string")
            .accessKeySecret("string")
            .kmsEncryptedAccessKeyId("string")
            .kmsEncryptedAccessKeySecret("string")
            .roleArn("string")
            .type("string")
            .build())
        .script("string")
        .project("string")
        .logstore("string")
        .displayName("string")
        .etlName("string")
        .kmsEncryptedAccessKeySecret("string")
        .parameters(Map.of("string", "string"))
        .fromTime(0)
        .kmsEncryptedAccessKeyId("string")
        .accessKeyId("string")
        .kmsEncryptionAccessKeyIdContext(Map.of("string", "any"))
        .kmsEncryptionAccessKeySecretContext(Map.of("string", "any"))
        .lastModifiedTime(0)
        .description("string")
        .etlType("string")
        .createTime(0)
        .roleArn("string")
        .schedule("string")
        .accessKeySecret("string")
        .status("string")
        .toTime(0)
        .version(0)
        .build());
    
    etl_resource = alicloud.log.Etl("etlResource",
        etl_sinks=[alicloud.log.EtlEtlSinkArgs(
            endpoint="string",
            logstore="string",
            name="string",
            project="string",
            access_key_id="string",
            access_key_secret="string",
            kms_encrypted_access_key_id="string",
            kms_encrypted_access_key_secret="string",
            role_arn="string",
            type="string",
        )],
        script="string",
        project="string",
        logstore="string",
        display_name="string",
        etl_name="string",
        kms_encrypted_access_key_secret="string",
        parameters={
            "string": "string",
        },
        from_time=0,
        kms_encrypted_access_key_id="string",
        access_key_id="string",
        kms_encryption_access_key_id_context={
            "string": "any",
        },
        kms_encryption_access_key_secret_context={
            "string": "any",
        },
        last_modified_time=0,
        description="string",
        etl_type="string",
        create_time=0,
        role_arn="string",
        schedule="string",
        access_key_secret="string",
        status="string",
        to_time=0,
        version=0)
    
    const etlResource = new alicloud.log.Etl("etlResource", {
        etlSinks: [{
            endpoint: "string",
            logstore: "string",
            name: "string",
            project: "string",
            accessKeyId: "string",
            accessKeySecret: "string",
            kmsEncryptedAccessKeyId: "string",
            kmsEncryptedAccessKeySecret: "string",
            roleArn: "string",
            type: "string",
        }],
        script: "string",
        project: "string",
        logstore: "string",
        displayName: "string",
        etlName: "string",
        kmsEncryptedAccessKeySecret: "string",
        parameters: {
            string: "string",
        },
        fromTime: 0,
        kmsEncryptedAccessKeyId: "string",
        accessKeyId: "string",
        kmsEncryptionAccessKeyIdContext: {
            string: "any",
        },
        kmsEncryptionAccessKeySecretContext: {
            string: "any",
        },
        lastModifiedTime: 0,
        description: "string",
        etlType: "string",
        createTime: 0,
        roleArn: "string",
        schedule: "string",
        accessKeySecret: "string",
        status: "string",
        toTime: 0,
        version: 0,
    });
    
    type: alicloud:log:Etl
    properties:
        accessKeyId: string
        accessKeySecret: string
        createTime: 0
        description: string
        displayName: string
        etlName: string
        etlSinks:
            - accessKeyId: string
              accessKeySecret: string
              endpoint: string
              kmsEncryptedAccessKeyId: string
              kmsEncryptedAccessKeySecret: string
              logstore: string
              name: string
              project: string
              roleArn: string
              type: string
        etlType: string
        fromTime: 0
        kmsEncryptedAccessKeyId: string
        kmsEncryptedAccessKeySecret: string
        kmsEncryptionAccessKeyIdContext:
            string: any
        kmsEncryptionAccessKeySecretContext:
            string: any
        lastModifiedTime: 0
        logstore: string
        parameters:
            string: string
        project: string
        roleArn: string
        schedule: string
        script: string
        status: string
        toTime: 0
        version: 0
    

    Etl Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    The Etl resource accepts the following input properties:

    DisplayName string
    Log service etl job alias.
    EtlName string
    The name of the log etl job.
    EtlSinks List<Pulumi.AliCloud.Log.Inputs.EtlEtlSink>
    Target logstore configuration for delivery after data processing.
    Logstore string
    The source logstore of the processing job.
    Project string
    The name of the project where the etl job is located.
    Script string
    Processing operation grammar.
    AccessKeyId string
    Source logstore access key id.
    AccessKeySecret string
    Source logstore access key secret.
    CreateTime int
    The etl job create time.
    Description string
    Description of the log etl job.
    EtlType string
    Log service etl type, the default value is ETL.
    FromTime int
    The start time of the processing job, if not set the value is 0, indicates to start processing from the oldest data.
    KmsEncryptedAccessKeyId string
    A KMS-encrypted access key ID used by the log ETL job. If access_key_id is filled in, this field will be ignored.
    KmsEncryptedAccessKeySecret string
    A KMS-encrypted access key secret used by the log ETL job. If access_key_secret is filled in, this field will be ignored.
    KmsEncryptionAccessKeyIdContext Dictionary<string, object>
    A KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid when kms_encrypted_access_key_id is set. When it is changed, the instance will reboot to make the change take effect.
    KmsEncryptionAccessKeySecretContext Dictionary<string, object>
    A KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid when kms_encrypted_access_key_secret is set. When it is changed, the instance will reboot to make the change take effect.
    LastModifiedTime int
    ETL job last modified time.
    Parameters Dictionary<string, string>
    Advanced parameter configuration of processing operations.
    RoleArn string
    STS role info for the source logstore. Fill in at most one of role_arn and the (access_key_id, access_key_secret) pair. If you fill in neither, you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to retrieve the key pair via KMS.
    Schedule string
    Job scheduling type, the default value is Resident.
    Status string
    The status of the log ETL job. The default value is RUNNING. Only 4 values are supported: STARTING, RUNNING, STOPPING, STOPPED.
    ToTime int
    Deadline of processing job, if not set the value is 0, indicates that new data will be processed continuously.
    Version int
    Log etl job version. the default value is 2.
    DisplayName string
    Log service etl job alias.
    EtlName string
    The name of the log etl job.
    EtlSinks []EtlEtlSinkArgs
    Target logstore configuration for delivery after data processing.
    Logstore string
    The source logstore of the processing job.
    Project string
    The name of the project where the etl job is located.
    Script string
    Processing operation grammar.
    AccessKeyId string
    Source logstore access key id.
    AccessKeySecret string
    Source logstore access key secret.
    CreateTime int
    The etl job create time.
    Description string
    Description of the log etl job.
    EtlType string
    Log service etl type, the default value is ETL.
    FromTime int
    The start time of the processing job, if not set the value is 0, indicates to start processing from the oldest data.
    KmsEncryptedAccessKeyId string
    A KMS-encrypted access key ID used for the log ETL job. If access_key_id is filled in, this field will be ignored.
    KmsEncryptedAccessKeySecret string
    A KMS-encrypted access key secret used for the log ETL job. If access_key_secret is filled in, this field will be ignored.
    KmsEncryptionAccessKeyIdContext map[string]interface{}
    A KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid only when kms_encrypted_access_key_id is set. When it is changed, the instance will reboot to make the change take effect.
    KmsEncryptionAccessKeySecretContext map[string]interface{}
    A KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid only when kms_encrypted_access_key_secret is set. When it is changed, the instance will reboot to make the change take effect.
    LastModifiedTime int
    ETL job last modified time.
    Parameters map[string]string
    Advanced parameter configuration of processing operations.
    RoleArn string
    STS role info under the source logstore. Fill in at most one of role_arn or (access_key_id, access_key_secret). If you fill in neither, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to use KMS to get the key pair.
    Schedule string
    Job scheduling type, the default value is Resident.
    Status string
    Status of the log ETL job. The default value is RUNNING. Only 4 values are supported: STARTING, RUNNING, STOPPING, STOPPED.
    ToTime int
    Deadline of processing job, if not set the value is 0, indicates that new data will be processed continuously.
    Version int
    Log etl job version. the default value is 2.
    displayName String
    Log service etl job alias.
    etlName String
    The name of the log etl job.
    etlSinks List<EtlEtlSink>
    Target logstore configuration for delivery after data processing.
    logstore String
    The source logstore of the processing job.
    project String
    The name of the project where the etl job is located.
    script String
    Processing operation grammar.
    accessKeyId String
    Source logstore access key id.
    accessKeySecret String
    Source logstore access key secret.
    createTime Integer
    The etl job create time.
    description String
    Description of the log etl job.
    etlType String
    Log service etl type, the default value is ETL.
    fromTime Integer
    The start time of the processing job, if not set the value is 0, indicates to start processing from the oldest data.
    kmsEncryptedAccessKeyId String
    A KMS-encrypted access key ID used for the log ETL job. If access_key_id is filled in, this field will be ignored.
    kmsEncryptedAccessKeySecret String
    A KMS-encrypted access key secret used for the log ETL job. If access_key_secret is filled in, this field will be ignored.
    kmsEncryptionAccessKeyIdContext Map<String,Object>
    A KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid only when kms_encrypted_access_key_id is set. When it is changed, the instance will reboot to make the change take effect.
    kmsEncryptionAccessKeySecretContext Map<String,Object>
    A KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid only when kms_encrypted_access_key_secret is set. When it is changed, the instance will reboot to make the change take effect.
    lastModifiedTime Integer
    ETL job last modified time.
    parameters Map<String,String>
    Advanced parameter configuration of processing operations.
    roleArn String
    STS role info under the source logstore. Fill in at most one of role_arn or (access_key_id, access_key_secret). If you fill in neither, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to use KMS to get the key pair.
    schedule String
    Job scheduling type, the default value is Resident.
    status String
    Status of the log ETL job. The default value is RUNNING. Only 4 values are supported: STARTING, RUNNING, STOPPING, STOPPED.
    toTime Integer
    Deadline of processing job, if not set the value is 0, indicates that new data will be processed continuously.
    version Integer
    Log etl job version. the default value is 2.
    displayName string
    Log service etl job alias.
    etlName string
    The name of the log etl job.
    etlSinks EtlEtlSink[]
    Target logstore configuration for delivery after data processing.
    logstore string
    The source logstore of the processing job.
    project string
    The name of the project where the etl job is located.
    script string
    Processing operation grammar.
    accessKeyId string
    Source logstore access key id.
    accessKeySecret string
    Source logstore access key secret.
    createTime number
    The etl job create time.
    description string
    Description of the log etl job.
    etlType string
    Log service etl type, the default value is ETL.
    fromTime number
    The start time of the processing job, if not set the value is 0, indicates to start processing from the oldest data.
    kmsEncryptedAccessKeyId string
    A KMS-encrypted access key ID used for the log ETL job. If access_key_id is filled in, this field will be ignored.
    kmsEncryptedAccessKeySecret string
    A KMS-encrypted access key secret used for the log ETL job. If access_key_secret is filled in, this field will be ignored.
    kmsEncryptionAccessKeyIdContext {[key: string]: any}
    A KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid only when kms_encrypted_access_key_id is set. When it is changed, the instance will reboot to make the change take effect.
    kmsEncryptionAccessKeySecretContext {[key: string]: any}
    A KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid only when kms_encrypted_access_key_secret is set. When it is changed, the instance will reboot to make the change take effect.
    lastModifiedTime number
    ETL job last modified time.
    parameters {[key: string]: string}
    Advanced parameter configuration of processing operations.
    roleArn string
    STS role info under the source logstore. Fill in at most one of role_arn or (access_key_id, access_key_secret). If you fill in neither, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to use KMS to get the key pair.
    schedule string
    Job scheduling type, the default value is Resident.
    status string
    Status of the log ETL job. The default value is RUNNING. Only 4 values are supported: STARTING, RUNNING, STOPPING, STOPPED.
    toTime number
    Deadline of processing job, if not set the value is 0, indicates that new data will be processed continuously.
    version number
    Log etl job version. the default value is 2.
    display_name str
    Log service etl job alias.
    etl_name str
    The name of the log etl job.
    etl_sinks Sequence[EtlEtlSinkArgs]
    Target logstore configuration for delivery after data processing.
    logstore str
    The source logstore of the processing job.
    project str
    The name of the project where the etl job is located.
    script str
    Processing operation grammar.
    access_key_id str
    Source logstore access key id.
    access_key_secret str
    Source logstore access key secret.
    create_time int
    The etl job create time.
    description str
    Description of the log etl job.
    etl_type str
    Log service etl type, the default value is ETL.
    from_time int
    The start time of the processing job, if not set the value is 0, indicates to start processing from the oldest data.
    kms_encrypted_access_key_id str
    A KMS-encrypted access key ID used for the log ETL job. If access_key_id is filled in, this field will be ignored.
    kms_encrypted_access_key_secret str
    A KMS-encrypted access key secret used for the log ETL job. If access_key_secret is filled in, this field will be ignored.
    kms_encryption_access_key_id_context Mapping[str, Any]
    A KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid only when kms_encrypted_access_key_id is set. When it is changed, the instance will reboot to make the change take effect.
    kms_encryption_access_key_secret_context Mapping[str, Any]
    A KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid only when kms_encrypted_access_key_secret is set. When it is changed, the instance will reboot to make the change take effect.
    last_modified_time int
    ETL job last modified time.
    parameters Mapping[str, str]
    Advanced parameter configuration of processing operations.
    role_arn str
    STS role info under the source logstore. Fill in at most one of role_arn or (access_key_id, access_key_secret). If you fill in neither, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to use KMS to get the key pair.
    schedule str
    Job scheduling type, the default value is Resident.
    status str
    Status of the log ETL job. The default value is RUNNING. Only 4 values are supported: STARTING, RUNNING, STOPPING, STOPPED.
    to_time int
    Deadline of processing job, if not set the value is 0, indicates that new data will be processed continuously.
    version int
    Log etl job version. the default value is 2.
    displayName String
    Log service etl job alias.
    etlName String
    The name of the log etl job.
    etlSinks List<Property Map>
    Target logstore configuration for delivery after data processing.
    logstore String
    The source logstore of the processing job.
    project String
    The name of the project where the etl job is located.
    script String
    Processing operation grammar.
    accessKeyId String
    Source logstore access key id.
    accessKeySecret String
    Source logstore access key secret.
    createTime Number
    The etl job create time.
    description String
    Description of the log etl job.
    etlType String
    Log service etl type, the default value is ETL.
    fromTime Number
    The start time of the processing job, if not set the value is 0, indicates to start processing from the oldest data.
    kmsEncryptedAccessKeyId String
    A KMS-encrypted access key ID used for the log ETL job. If access_key_id is filled in, this field will be ignored.
    kmsEncryptedAccessKeySecret String
    A KMS-encrypted access key secret used for the log ETL job. If access_key_secret is filled in, this field will be ignored.
    kmsEncryptionAccessKeyIdContext Map<Any>
    A KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid only when kms_encrypted_access_key_id is set. When it is changed, the instance will reboot to make the change take effect.
    kmsEncryptionAccessKeySecretContext Map<Any>
    A KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid only when kms_encrypted_access_key_secret is set. When it is changed, the instance will reboot to make the change take effect.
    lastModifiedTime Number
    ETL job last modified time.
    parameters Map<String>
    Advanced parameter configuration of processing operations.
    roleArn String
    STS role info under the source logstore. Fill in at most one of role_arn or (access_key_id, access_key_secret). If you fill in neither, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to use KMS to get the key pair.
    schedule String
    Job scheduling type, the default value is Resident.
    status String
    Status of the log ETL job. The default value is RUNNING. Only 4 values are supported: STARTING, RUNNING, STOPPING, STOPPED.
    toTime Number
    Deadline of processing job, if not set the value is 0, indicates that new data will be processed continuously.
    version Number
    Log etl job version. the default value is 2.

    Outputs

    All input properties are implicitly available as output properties. Additionally, the Etl resource produces the following output properties:

    Id string
    The provider-assigned unique ID for this managed resource.
    Id string
    The provider-assigned unique ID for this managed resource.
    id String
    The provider-assigned unique ID for this managed resource.
    id string
    The provider-assigned unique ID for this managed resource.
    id str
    The provider-assigned unique ID for this managed resource.
    id String
    The provider-assigned unique ID for this managed resource.

    Look up Existing Etl Resource

    Get an existing Etl resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: EtlState, opts?: CustomResourceOptions): Etl
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            access_key_id: Optional[str] = None,
            access_key_secret: Optional[str] = None,
            create_time: Optional[int] = None,
            description: Optional[str] = None,
            display_name: Optional[str] = None,
            etl_name: Optional[str] = None,
            etl_sinks: Optional[Sequence[EtlEtlSinkArgs]] = None,
            etl_type: Optional[str] = None,
            from_time: Optional[int] = None,
            kms_encrypted_access_key_id: Optional[str] = None,
            kms_encrypted_access_key_secret: Optional[str] = None,
            kms_encryption_access_key_id_context: Optional[Mapping[str, Any]] = None,
            kms_encryption_access_key_secret_context: Optional[Mapping[str, Any]] = None,
            last_modified_time: Optional[int] = None,
            logstore: Optional[str] = None,
            parameters: Optional[Mapping[str, str]] = None,
            project: Optional[str] = None,
            role_arn: Optional[str] = None,
            schedule: Optional[str] = None,
            script: Optional[str] = None,
            status: Optional[str] = None,
            to_time: Optional[int] = None,
            version: Optional[int] = None) -> Etl
    func GetEtl(ctx *Context, name string, id IDInput, state *EtlState, opts ...ResourceOption) (*Etl, error)
    public static Etl Get(string name, Input<string> id, EtlState? state, CustomResourceOptions? opts = null)
    public static Etl get(String name, Output<String> id, EtlState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    AccessKeyId string
    Source logstore access key id.
    AccessKeySecret string
    Source logstore access key secret.
    CreateTime int
    The etl job create time.
    Description string
    Description of the log etl job.
    DisplayName string
    Log service etl job alias.
    EtlName string
    The name of the log etl job.
    EtlSinks List<Pulumi.AliCloud.Log.Inputs.EtlEtlSink>
    Target logstore configuration for delivery after data processing.
    EtlType string
    Log service etl type, the default value is ETL.
    FromTime int
    The start time of the processing job, if not set the value is 0, indicates to start processing from the oldest data.
    KmsEncryptedAccessKeyId string
    A KMS-encrypted access key ID used for the log ETL job. If access_key_id is filled in, this field will be ignored.
    KmsEncryptedAccessKeySecret string
    A KMS-encrypted access key secret used for the log ETL job. If access_key_secret is filled in, this field will be ignored.
    KmsEncryptionAccessKeyIdContext Dictionary<string, object>
    A KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid only when kms_encrypted_access_key_id is set. When it is changed, the instance will reboot to make the change take effect.
    KmsEncryptionAccessKeySecretContext Dictionary<string, object>
    A KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid only when kms_encrypted_access_key_secret is set. When it is changed, the instance will reboot to make the change take effect.
    LastModifiedTime int
    ETL job last modified time.
    Logstore string
    The source logstore of the processing job.
    Parameters Dictionary<string, string>
    Advanced parameter configuration of processing operations.
    Project string
    The name of the project where the etl job is located.
    RoleArn string
    STS role info under the source logstore. Fill in at most one of role_arn or (access_key_id, access_key_secret). If you fill in neither, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to use KMS to get the key pair.
    Schedule string
    Job scheduling type, the default value is Resident.
    Script string
    Processing operation grammar.
    Status string
    Status of the log ETL job. The default value is RUNNING. Only 4 values are supported: STARTING, RUNNING, STOPPING, STOPPED.
    ToTime int
    Deadline of processing job, if not set the value is 0, indicates that new data will be processed continuously.
    Version int
    Log etl job version. the default value is 2.
    AccessKeyId string
    Source logstore access key id.
    AccessKeySecret string
    Source logstore access key secret.
    CreateTime int
    The etl job create time.
    Description string
    Description of the log etl job.
    DisplayName string
    Log service etl job alias.
    EtlName string
    The name of the log etl job.
    EtlSinks []EtlEtlSinkArgs
    Target logstore configuration for delivery after data processing.
    EtlType string
    Log service etl type, the default value is ETL.
    FromTime int
    The start time of the processing job, if not set the value is 0, indicates to start processing from the oldest data.
    KmsEncryptedAccessKeyId string
    A KMS-encrypted access key ID used for the log ETL job. If access_key_id is filled in, this field will be ignored.
    KmsEncryptedAccessKeySecret string
    A KMS-encrypted access key secret used for the log ETL job. If access_key_secret is filled in, this field will be ignored.
    KmsEncryptionAccessKeyIdContext map[string]interface{}
    A KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid only when kms_encrypted_access_key_id is set. When it is changed, the instance will reboot to make the change take effect.
    KmsEncryptionAccessKeySecretContext map[string]interface{}
    A KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid only when kms_encrypted_access_key_secret is set. When it is changed, the instance will reboot to make the change take effect.
    LastModifiedTime int
    ETL job last modified time.
    Logstore string
    The source logstore of the processing job.
    Parameters map[string]string
    Advanced parameter configuration of processing operations.
    Project string
    The name of the project where the etl job is located.
    RoleArn string
    STS role info under the source logstore. Fill in at most one of role_arn or (access_key_id, access_key_secret). If you fill in neither, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to use KMS to get the key pair.
    Schedule string
    Job scheduling type, the default value is Resident.
    Script string
    Processing operation grammar.
    Status string
    Status of the log ETL job. The default value is RUNNING. Only 4 values are supported: STARTING, RUNNING, STOPPING, STOPPED.
    ToTime int
    Deadline of processing job, if not set the value is 0, indicates that new data will be processed continuously.
    Version int
    Log etl job version. the default value is 2.
    accessKeyId String
    Source logstore access key id.
    accessKeySecret String
    Source logstore access key secret.
    createTime Integer
    The etl job create time.
    description String
    Description of the log etl job.
    displayName String
    Log service etl job alias.
    etlName String
    The name of the log etl job.
    etlSinks List<EtlEtlSink>
    Target logstore configuration for delivery after data processing.
    etlType String
    Log service etl type, the default value is ETL.
    fromTime Integer
    The start time of the processing job, if not set the value is 0, indicates to start processing from the oldest data.
    kmsEncryptedAccessKeyId String
    A KMS-encrypted access key ID used for the log ETL job. If access_key_id is filled in, this field will be ignored.
    kmsEncryptedAccessKeySecret String
    A KMS-encrypted access key secret used for the log ETL job. If access_key_secret is filled in, this field will be ignored.
    kmsEncryptionAccessKeyIdContext Map<String,Object>
    A KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid only when kms_encrypted_access_key_id is set. When it is changed, the instance will reboot to make the change take effect.
    kmsEncryptionAccessKeySecretContext Map<String,Object>
    A KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid only when kms_encrypted_access_key_secret is set. When it is changed, the instance will reboot to make the change take effect.
    lastModifiedTime Integer
    ETL job last modified time.
    logstore String
    The source logstore of the processing job.
    parameters Map<String,String>
    Advanced parameter configuration of processing operations.
    project String
    The name of the project where the etl job is located.
    roleArn String
    STS role info under the source logstore. Fill in at most one of role_arn or (access_key_id, access_key_secret). If you fill in neither, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to use KMS to get the key pair.
    schedule String
    Job scheduling type, the default value is Resident.
    script String
    Processing operation grammar.
    status String
    Status of the log ETL job. The default value is RUNNING. Only 4 values are supported: STARTING, RUNNING, STOPPING, STOPPED.
    toTime Integer
    Deadline of processing job, if not set the value is 0, indicates that new data will be processed continuously.
    version Integer
    Log etl job version. the default value is 2.
    accessKeyId string
    Source logstore access key id.
    accessKeySecret string
    Source logstore access key secret.
    createTime number
    The etl job create time.
    description string
    Description of the log etl job.
    displayName string
    Log service etl job alias.
    etlName string
    The name of the log etl job.
    etlSinks EtlEtlSink[]
    Target logstore configuration for delivery after data processing.
    etlType string
    Log service etl type, the default value is ETL.
    fromTime number
    The start time of the processing job, if not set the value is 0, indicates to start processing from the oldest data.
    kmsEncryptedAccessKeyId string
    A KMS-encrypted access key ID used for the log ETL job. If access_key_id is filled in, this field will be ignored.
    kmsEncryptedAccessKeySecret string
    A KMS-encrypted access key secret used for the log ETL job. If access_key_secret is filled in, this field will be ignored.
    kmsEncryptionAccessKeyIdContext {[key: string]: any}
    A KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid only when kms_encrypted_access_key_id is set. When it is changed, the instance will reboot to make the change take effect.
    kmsEncryptionAccessKeySecretContext {[key: string]: any}
    A KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid only when kms_encrypted_access_key_secret is set. When it is changed, the instance will reboot to make the change take effect.
    lastModifiedTime number
    ETL job last modified time.
    logstore string
    The source logstore of the processing job.
    parameters {[key: string]: string}
    Advanced parameter configuration of processing operations.
    project string
    The name of the project where the etl job is located.
    roleArn string
    STS role info under the source logstore. Fill in at most one of role_arn or (access_key_id, access_key_secret). If you fill in neither, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to use KMS to get the key pair.
    schedule string
    Job scheduling type, the default value is Resident.
    script string
    Processing operation grammar.
    status string
    Status of the log ETL job. The default value is RUNNING. Only 4 values are supported: STARTING, RUNNING, STOPPING, STOPPED.
    toTime number
    Deadline of processing job, if not set the value is 0, indicates that new data will be processed continuously.
    version number
    Log etl job version. the default value is 2.
    access_key_id str
    Source logstore access key id.
    access_key_secret str
    Source logstore access key secret.
    create_time int
    The etl job create time.
    description str
    Description of the log etl job.
    display_name str
    Log service etl job alias.
    etl_name str
    The name of the log etl job.
    etl_sinks Sequence[EtlEtlSinkArgs]
    Target logstore configuration for delivery after data processing.
    etl_type str
    Log service etl type, the default value is ETL.
    from_time int
    The start time of the processing job, if not set the value is 0, indicates to start processing from the oldest data.
    kms_encrypted_access_key_id str
    A KMS-encrypted access key ID used for the log ETL job. If access_key_id is filled in, this field will be ignored.
    kms_encrypted_access_key_secret str
    A KMS-encrypted access key secret used for the log ETL job. If access_key_secret is filled in, this field will be ignored.
    kms_encryption_access_key_id_context Mapping[str, Any]
    A KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid only when kms_encrypted_access_key_id is set. When it is changed, the instance will reboot to make the change take effect.
    kms_encryption_access_key_secret_context Mapping[str, Any]
    A KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid only when kms_encrypted_access_key_secret is set. When it is changed, the instance will reboot to make the change take effect.
    last_modified_time int
    ETL job last modified time.
    logstore str
    The source logstore of the processing job.
    parameters Mapping[str, str]
    Advanced parameter configuration of processing operations.
    project str
    The name of the project where the etl job is located.
    role_arn str
    STS role info under the source logstore. Fill in at most one of role_arn or (access_key_id, access_key_secret). If you fill in neither, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to use KMS to get the key pair.
    schedule str
    Job scheduling type, the default value is Resident.
    script str
    Processing operation grammar.
    status str
    Status of the log ETL job. The default value is RUNNING. Only 4 values are supported: STARTING, RUNNING, STOPPING, STOPPED.
    to_time int
    Deadline of processing job, if not set the value is 0, indicates that new data will be processed continuously.
    version int
    Log etl job version. the default value is 2.
    accessKeyId String
    Source logstore access key id.
    accessKeySecret String
    Source logstore access key secret.
    createTime Number
    The etl job create time.
    description String
    Description of the log etl job.
    displayName String
    Log service etl job alias.
    etlName String
    The name of the log etl job.
    etlSinks List<Property Map>
    Target logstore configuration for delivery after data processing.
    etlType String
    Log service etl type, the default value is ETL.
    fromTime Number
    The start time of the processing job, if not set the value is 0, indicates to start processing from the oldest data.
    kmsEncryptedAccessKeyId String
    A KMS-encrypted access key ID used for the log ETL job. If access_key_id is filled in, this field will be ignored.
    kmsEncryptedAccessKeySecret String
    A KMS-encrypted access key secret used for the log ETL job. If access_key_secret is filled in, this field will be ignored.
    kmsEncryptionAccessKeyIdContext Map<Any>
    A KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid when kms_encrypted_access_key_id is set. When it is changed, the instance will reboot to make the change take effect.
    kmsEncryptionAccessKeySecretContext Map<Any>
    A KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid when kms_encrypted_access_key_secret is set. When it is changed, the instance will reboot to make the change take effect.
    lastModifiedTime Number
    ETL job last modified time.
    logstore String
    The source logstore of the processing job.
    parameters Map<String>
    Advanced parameter configuration of processing operations.
    project String
    The name of the project where the etl job is located.
    roleArn String
    STS role information for the source logstore. Fill in at most one of role_arn and the (access_key_id, access_key_secret) pair. If you fill in neither, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to obtain the key pair through KMS.
    schedule String
    Job scheduling type, the default value is Resident.
    script String
    Processing operation grammar.
    status String
    The status of the ETL job. The default value is RUNNING. Only 4 values are supported: STARTING, RUNNING, STOPPING, STOPPED.
    toTime Number
    Deadline of processing job, if not set the value is 0, indicates that new data will be processed continuously.
    version Number
    Log etl job version. the default value is 2.

    Supporting Types

    EtlEtlSink, EtlEtlSinkArgs

    Endpoint string
    Delivery target logstore region.
    Logstore string
    Delivery target logstore.
    Name string
    Delivery target name.
    Project string
    The project where the target logstore is delivered.
    AccessKeyId string
    Delivery target logstore access key id.
    AccessKeySecret string
    Delivery target logstore access key secret.
    KmsEncryptedAccessKeyId string
    A KMS-encrypted access key ID used for the log ETL job. If access_key_id is filled in, this field will be ignored.
    KmsEncryptedAccessKeySecret string
    A KMS-encrypted access key secret used for the log ETL job. If access_key_secret is filled in, this field will be ignored.
    RoleArn string
    STS role information for the delivery target logstore. Fill in at most one of role_arn and the (access_key_id, access_key_secret) pair. If you fill in neither, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to obtain the key pair through KMS.
    Type string

    ETL sinks type, the default value is AliyunLOG.

    Note: from_time and to_time no modification allowed after successful creation.

    Endpoint string
    Delivery target logstore region.
    Logstore string
    Delivery target logstore.
    Name string
    Delivery target name.
    Project string
    The project where the target logstore is delivered.
    AccessKeyId string
    Delivery target logstore access key id.
    AccessKeySecret string
    Delivery target logstore access key secret.
    KmsEncryptedAccessKeyId string
    A KMS-encrypted access key ID used for the log ETL job. If access_key_id is filled in, this field will be ignored.
    KmsEncryptedAccessKeySecret string
    A KMS-encrypted access key secret used for the log ETL job. If access_key_secret is filled in, this field will be ignored.
    RoleArn string
    STS role information for the delivery target logstore. Fill in at most one of role_arn and the (access_key_id, access_key_secret) pair. If you fill in neither, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to obtain the key pair through KMS.
    Type string

    ETL sinks type, the default value is AliyunLOG.

    Note: from_time and to_time no modification allowed after successful creation.

    endpoint String
    Delivery target logstore region.
    logstore String
    Delivery target logstore.
    name String
    Delivery target name.
    project String
    The project where the target logstore is delivered.
    accessKeyId String
    Delivery target logstore access key id.
    accessKeySecret String
    Delivery target logstore access key secret.
    kmsEncryptedAccessKeyId String
    A KMS-encrypted access key ID used for the log ETL job. If access_key_id is filled in, this field will be ignored.
    kmsEncryptedAccessKeySecret String
    A KMS-encrypted access key secret used for the log ETL job. If access_key_secret is filled in, this field will be ignored.
    roleArn String
    STS role information for the delivery target logstore. Fill in at most one of role_arn and the (access_key_id, access_key_secret) pair. If you fill in neither, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to obtain the key pair through KMS.
    type String

    ETL sinks type, the default value is AliyunLOG.

    Note: from_time and to_time no modification allowed after successful creation.

    endpoint string
    Delivery target logstore region.
    logstore string
    Delivery target logstore.
    name string
    Delivery target name.
    project string
    The project where the target logstore is delivered.
    accessKeyId string
    Delivery target logstore access key id.
    accessKeySecret string
    Delivery target logstore access key secret.
    kmsEncryptedAccessKeyId string
    A KMS-encrypted access key ID used for the log ETL job. If access_key_id is filled in, this field will be ignored.
    kmsEncryptedAccessKeySecret string
    A KMS-encrypted access key secret used for the log ETL job. If access_key_secret is filled in, this field will be ignored.
    roleArn string
    STS role information for the delivery target logstore. Fill in at most one of role_arn and the (access_key_id, access_key_secret) pair. If you fill in neither, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to obtain the key pair through KMS.
    type string

    ETL sinks type, the default value is AliyunLOG.

    Note: from_time and to_time no modification allowed after successful creation.

    endpoint str
    Delivery target logstore region.
    logstore str
    Delivery target logstore.
    name str
    Delivery target name.
    project str
    The project where the target logstore is delivered.
    access_key_id str
    Delivery target logstore access key id.
    access_key_secret str
    Delivery target logstore access key secret.
    kms_encrypted_access_key_id str
    A KMS-encrypted access key ID used for the log ETL job. If access_key_id is filled in, this field will be ignored.
    kms_encrypted_access_key_secret str
    A KMS-encrypted access key secret used for the log ETL job. If access_key_secret is filled in, this field will be ignored.
    role_arn str
    STS role information for the delivery target logstore. Fill in at most one of role_arn and the (access_key_id, access_key_secret) pair. If you fill in neither, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to obtain the key pair through KMS.
    type str

    ETL sinks type, the default value is AliyunLOG.

    Note: from_time and to_time no modification allowed after successful creation.

    endpoint String
    Delivery target logstore region.
    logstore String
    Delivery target logstore.
    name String
    Delivery target name.
    project String
    The project where the target logstore is delivered.
    accessKeyId String
    Delivery target logstore access key id.
    accessKeySecret String
    Delivery target logstore access key secret.
    kmsEncryptedAccessKeyId String
    A KMS-encrypted access key ID used for the log ETL job. If access_key_id is filled in, this field will be ignored.
    kmsEncryptedAccessKeySecret String
    A KMS-encrypted access key secret used for the log ETL job. If access_key_secret is filled in, this field will be ignored.
    roleArn String
    STS role information for the delivery target logstore. Fill in at most one of role_arn and the (access_key_id, access_key_secret) pair. If you fill in neither, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to obtain the key pair through KMS.
    type String

    ETL sinks type, the default value is AliyunLOG.

    Note: from_time and to_time no modification allowed after successful creation.

    Import

    Log etl can be imported using the id, e.g.

    $ pulumi import alicloud:log/etl:Etl example tf-log-project:tf-log-etl-name
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    Alibaba Cloud pulumi/pulumi-alicloud
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the alicloud Terraform Provider.
    alicloud logo
    Alibaba Cloud v3.57.0 published on Saturday, Jun 15, 2024 by Pulumi