alicloud logo
Alibaba Cloud v3.34.0, Mar 17 23

alicloud.log.Etl

The data transformation feature of Log Service is a hosted, highly available, and scalable data processing service that is widely used in scenarios such as data regularization, enrichment, distribution, aggregation, and index reconstruction. For more information, see the Alibaba Cloud data transformation documentation.

NOTE: Available since v1.120.0.

Example Usage

Basic Usage

// Basic usage: provision a Log project, one source logstore, and two target
// logstores, then create an ETL job that runs the transformation Script
// against the source and delivers results to the targets via EtlSinks.
using System.Collections.Generic;
using Pulumi;
using AliCloud = Pulumi.AliCloud;

return await Deployment.RunAsync(() => 
{
    // Project that owns every logstore used by the ETL job.
    var exampleProject = new AliCloud.Log.Project("exampleProject", new()
    {
        Description = "created by terraform",
    });

    // Source logstore the ETL job reads from.
    var exampleStore = new AliCloud.Log.Store("exampleStore", new()
    {
        Project = exampleProject.Name,
        RetentionPeriod = 3650,
        ShardCount = 3,
        AutoSplit = true,
        MaxSplitShardCount = 60,
        AppendMeta = true,
    });

    // First delivery target logstore.
    var example2 = new AliCloud.Log.Store("example2", new()
    {
        Project = exampleProject.Name,
        RetentionPeriod = 3650,
        ShardCount = 3,
        AutoSplit = true,
        MaxSplitShardCount = 60,
        AppendMeta = true,
    });

    // Second delivery target logstore.
    var example3 = new AliCloud.Log.Store("example3", new()
    {
        Project = exampleProject.Name,
        RetentionPeriod = 3650,
        ShardCount = 3,
        AutoSplit = true,
        MaxSplitShardCount = 60,
        AppendMeta = true,
    });

    // ETL job: applies Script to exampleStore and fans results out to the
    // two sinks. The access keys here are placeholders — do not commit real
    // credentials; prefer KMS-encrypted keys or a role ARN in production.
    var exampleEtl = new AliCloud.Log.Etl("exampleEtl", new()
    {
        EtlName = "etl_name",
        Project = exampleProject.Name,
        DisplayName = "display_name",
        Description = "etl_description",
        AccessKeyId = "access_key_id",
        AccessKeySecret = "access_key_secret",
        Script = "e_set('new','key')",
        Logstore = exampleStore.Name,
        EtlSinks = new[]
        {
            new AliCloud.Log.Inputs.EtlEtlSinkArgs
            {
                Name = "target_name",
                AccessKeyId = "example2_access_key_id",
                AccessKeySecret = "example2_access_key_secret",
                Endpoint = "cn-hangzhou.log.aliyuncs.com",
                Project = exampleProject.Name,
                Logstore = example2.Name,
            },
            new AliCloud.Log.Inputs.EtlEtlSinkArgs
            {
                Name = "target_name2",
                AccessKeyId = "example3_access_key_id",
                AccessKeySecret = "example3_access_key_secret",
                Endpoint = "cn-hangzhou.log.aliyuncs.com",
                Project = exampleProject.Name,
                Logstore = example3.Name,
            },
        },
    });

});
// Basic usage: provision a Log project, one source logstore, and two target
// logstores, then create an ETL job that runs the transformation script
// against the source and delivers results to the targets via EtlSinks.
package main

import (
	"github.com/pulumi/pulumi-alicloud/sdk/v3/go/alicloud/log"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// Project that owns every logstore used by the ETL job.
		exampleProject, err := log.NewProject(ctx, "exampleProject", &log.ProjectArgs{
			Description: pulumi.String("created by terraform"),
		})
		if err != nil {
			return err
		}
		// Source logstore the ETL job reads from.
		exampleStore, err := log.NewStore(ctx, "exampleStore", &log.StoreArgs{
			Project:            exampleProject.Name,
			RetentionPeriod:    pulumi.Int(3650),
			ShardCount:         pulumi.Int(3),
			AutoSplit:          pulumi.Bool(true),
			MaxSplitShardCount: pulumi.Int(60),
			AppendMeta:         pulumi.Bool(true),
		})
		if err != nil {
			return err
		}
		// First delivery target logstore.
		example2, err := log.NewStore(ctx, "example2", &log.StoreArgs{
			Project:            exampleProject.Name,
			RetentionPeriod:    pulumi.Int(3650),
			ShardCount:         pulumi.Int(3),
			AutoSplit:          pulumi.Bool(true),
			MaxSplitShardCount: pulumi.Int(60),
			AppendMeta:         pulumi.Bool(true),
		})
		if err != nil {
			return err
		}
		// Second delivery target logstore.
		example3, err := log.NewStore(ctx, "example3", &log.StoreArgs{
			Project:            exampleProject.Name,
			RetentionPeriod:    pulumi.Int(3650),
			ShardCount:         pulumi.Int(3),
			AutoSplit:          pulumi.Bool(true),
			MaxSplitShardCount: pulumi.Int(60),
			AppendMeta:         pulumi.Bool(true),
		})
		if err != nil {
			return err
		}
		// ETL job: applies Script to exampleStore and fans results out to
		// the two sinks. The access keys here are placeholders — do not
		// commit real credentials in a production program.
		_, err = log.NewEtl(ctx, "exampleEtl", &log.EtlArgs{
			EtlName:         pulumi.String("etl_name"),
			Project:         exampleProject.Name,
			DisplayName:     pulumi.String("display_name"),
			Description:     pulumi.String("etl_description"),
			AccessKeyId:     pulumi.String("access_key_id"),
			AccessKeySecret: pulumi.String("access_key_secret"),
			Script:          pulumi.String("e_set('new','key')"),
			Logstore:        exampleStore.Name,
			EtlSinks: log.EtlEtlSinkArray{
				&log.EtlEtlSinkArgs{
					Name:            pulumi.String("target_name"),
					AccessKeyId:     pulumi.String("example2_access_key_id"),
					AccessKeySecret: pulumi.String("example2_access_key_secret"),
					Endpoint:        pulumi.String("cn-hangzhou.log.aliyuncs.com"),
					Project:         exampleProject.Name,
					Logstore:        example2.Name,
				},
				&log.EtlEtlSinkArgs{
					Name:            pulumi.String("target_name2"),
					AccessKeyId:     pulumi.String("example3_access_key_id"),
					AccessKeySecret: pulumi.String("example3_access_key_secret"),
					Endpoint:        pulumi.String("cn-hangzhou.log.aliyuncs.com"),
					Project:         exampleProject.Name,
					Logstore:        example3.Name,
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
// Basic usage: provision a Log project, one source logstore, and two target
// logstores, then create an ETL job that runs the transformation script
// against the source and delivers results to the targets via etlSinks.
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.alicloud.log.Project;
import com.pulumi.alicloud.log.ProjectArgs;
import com.pulumi.alicloud.log.Store;
import com.pulumi.alicloud.log.StoreArgs;
import com.pulumi.alicloud.log.Etl;
import com.pulumi.alicloud.log.EtlArgs;
import com.pulumi.alicloud.log.inputs.EtlEtlSinkArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // Project that owns every logstore used by the ETL job.
        var exampleProject = new Project("exampleProject", ProjectArgs.builder()        
            .description("created by terraform")
            .build());

        // Source logstore the ETL job reads from.
        var exampleStore = new Store("exampleStore", StoreArgs.builder()        
            .project(exampleProject.name())
            .retentionPeriod(3650)
            .shardCount(3)
            .autoSplit(true)
            .maxSplitShardCount(60)
            .appendMeta(true)
            .build());

        // First delivery target logstore.
        var example2 = new Store("example2", StoreArgs.builder()        
            .project(exampleProject.name())
            .retentionPeriod(3650)
            .shardCount(3)
            .autoSplit(true)
            .maxSplitShardCount(60)
            .appendMeta(true)
            .build());

        // Second delivery target logstore.
        var example3 = new Store("example3", StoreArgs.builder()        
            .project(exampleProject.name())
            .retentionPeriod(3650)
            .shardCount(3)
            .autoSplit(true)
            .maxSplitShardCount(60)
            .appendMeta(true)
            .build());

        // ETL job: applies the script to exampleStore and fans results out
        // to the two sinks. The access keys here are placeholders — do not
        // commit real credentials in a production program.
        var exampleEtl = new Etl("exampleEtl", EtlArgs.builder()        
            .etlName("etl_name")
            .project(exampleProject.name())
            .displayName("display_name")
            .description("etl_description")
            .accessKeyId("access_key_id")
            .accessKeySecret("access_key_secret")
            .script("e_set('new','key')")
            .logstore(exampleStore.name())
            .etlSinks(            
                EtlEtlSinkArgs.builder()
                    .name("target_name")
                    .accessKeyId("example2_access_key_id")
                    .accessKeySecret("example2_access_key_secret")
                    .endpoint("cn-hangzhou.log.aliyuncs.com")
                    .project(exampleProject.name())
                    .logstore(example2.name())
                    .build(),
                EtlEtlSinkArgs.builder()
                    .name("target_name2")
                    .accessKeyId("example3_access_key_id")
                    .accessKeySecret("example3_access_key_secret")
                    .endpoint("cn-hangzhou.log.aliyuncs.com")
                    .project(exampleProject.name())
                    .logstore(example3.name())
                    .build())
            .build());

    }
}
# Basic usage: provision a Log project, one source logstore, and two target
# logstores, then create an ETL job that runs the transformation script
# against the source and delivers results to the targets via etl_sinks.
import pulumi
import pulumi_alicloud as alicloud

# Project that owns every logstore used by the ETL job.
example_project = alicloud.log.Project("exampleProject", description="created by terraform")
# Source logstore the ETL job reads from.
example_store = alicloud.log.Store("exampleStore",
    project=example_project.name,
    retention_period=3650,
    shard_count=3,
    auto_split=True,
    max_split_shard_count=60,
    append_meta=True)
# First delivery target logstore.
example2 = alicloud.log.Store("example2",
    project=example_project.name,
    retention_period=3650,
    shard_count=3,
    auto_split=True,
    max_split_shard_count=60,
    append_meta=True)
# Second delivery target logstore.
example3 = alicloud.log.Store("example3",
    project=example_project.name,
    retention_period=3650,
    shard_count=3,
    auto_split=True,
    max_split_shard_count=60,
    append_meta=True)
# ETL job: applies the script to example_store and fans results out to the
# two sinks. The access keys here are placeholders — do not commit real
# credentials in a production program.
example_etl = alicloud.log.Etl("exampleEtl",
    etl_name="etl_name",
    project=example_project.name,
    display_name="display_name",
    description="etl_description",
    access_key_id="access_key_id",
    access_key_secret="access_key_secret",
    script="e_set('new','key')",
    logstore=example_store.name,
    etl_sinks=[
        alicloud.log.EtlEtlSinkArgs(
            name="target_name",
            access_key_id="example2_access_key_id",
            access_key_secret="example2_access_key_secret",
            endpoint="cn-hangzhou.log.aliyuncs.com",
            project=example_project.name,
            logstore=example2.name,
        ),
        alicloud.log.EtlEtlSinkArgs(
            name="target_name2",
            access_key_id="example3_access_key_id",
            access_key_secret="example3_access_key_secret",
            endpoint="cn-hangzhou.log.aliyuncs.com",
            project=example_project.name,
            logstore=example3.name,
        ),
    ])
// Basic usage: provision a Log project, one source logstore, and two target
// logstores, then create an ETL job that runs the transformation script
// against the source and delivers results to the targets via etlSinks.
import * as pulumi from "@pulumi/pulumi";
import * as alicloud from "@pulumi/alicloud";

// Project that owns every logstore used by the ETL job.
const exampleProject = new alicloud.log.Project("exampleProject", {description: "created by terraform"});
// Source logstore the ETL job reads from.
const exampleStore = new alicloud.log.Store("exampleStore", {
    project: exampleProject.name,
    retentionPeriod: 3650,
    shardCount: 3,
    autoSplit: true,
    maxSplitShardCount: 60,
    appendMeta: true,
});
// First delivery target logstore.
const example2 = new alicloud.log.Store("example2", {
    project: exampleProject.name,
    retentionPeriod: 3650,
    shardCount: 3,
    autoSplit: true,
    maxSplitShardCount: 60,
    appendMeta: true,
});
// Second delivery target logstore.
const example3 = new alicloud.log.Store("example3", {
    project: exampleProject.name,
    retentionPeriod: 3650,
    shardCount: 3,
    autoSplit: true,
    maxSplitShardCount: 60,
    appendMeta: true,
});
// ETL job: applies the script to exampleStore and fans results out to the
// two sinks. The access keys here are placeholders — do not commit real
// credentials in a production program.
const exampleEtl = new alicloud.log.Etl("exampleEtl", {
    etlName: "etl_name",
    project: exampleProject.name,
    displayName: "display_name",
    description: "etl_description",
    accessKeyId: "access_key_id",
    accessKeySecret: "access_key_secret",
    script: "e_set('new','key')",
    logstore: exampleStore.name,
    etlSinks: [
        {
            name: "target_name",
            accessKeyId: "example2_access_key_id",
            accessKeySecret: "example2_access_key_secret",
            endpoint: "cn-hangzhou.log.aliyuncs.com",
            project: exampleProject.name,
            logstore: example2.name,
        },
        {
            name: "target_name2",
            accessKeyId: "example3_access_key_id",
            accessKeySecret: "example3_access_key_secret",
            endpoint: "cn-hangzhou.log.aliyuncs.com",
            project: exampleProject.name,
            logstore: example3.name,
        },
    ],
});
# Basic usage: provision a Log project, one source logstore, and two target
# logstores, then create an ETL job that runs the transformation script
# against the source and delivers results to the targets via etlSinks.
resources:
  # Project that owns every logstore used by the ETL job.
  exampleProject:
    type: alicloud:log:Project
    properties:
      description: created by terraform
  # Source logstore the ETL job reads from.
  exampleStore:
    type: alicloud:log:Store
    properties:
      project: ${exampleProject.name}
      retentionPeriod: 3650
      shardCount: 3
      autoSplit: true
      maxSplitShardCount: 60
      appendMeta: true
  # First delivery target logstore.
  example2:
    type: alicloud:log:Store
    properties:
      project: ${exampleProject.name}
      retentionPeriod: 3650
      shardCount: 3
      autoSplit: true
      maxSplitShardCount: 60
      appendMeta: true
  # Second delivery target logstore.
  example3:
    type: alicloud:log:Store
    properties:
      project: ${exampleProject.name}
      retentionPeriod: 3650
      shardCount: 3
      autoSplit: true
      maxSplitShardCount: 60
      appendMeta: true
  # ETL job: applies the script to exampleStore and fans results out to the
  # two sinks. The access keys here are placeholders — do not commit real
  # credentials in a production program.
  exampleEtl:
    type: alicloud:log:Etl
    properties:
      etlName: etl_name
      project: ${exampleProject.name}
      displayName: display_name
      description: etl_description
      accessKeyId: access_key_id
      accessKeySecret: access_key_secret
      script: e_set('new','key')
      logstore: ${exampleStore.name}
      etlSinks:
        - name: target_name
          accessKeyId: example2_access_key_id
          accessKeySecret: example2_access_key_secret
          endpoint: cn-hangzhou.log.aliyuncs.com
          project: ${exampleProject.name}
          logstore: ${example2.name}
        - name: target_name2
          accessKeyId: example3_access_key_id
          accessKeySecret: example3_access_key_secret
          endpoint: cn-hangzhou.log.aliyuncs.com
          project: ${exampleProject.name}
          logstore: ${example3.name}

Stop the task in progress

// Stop a running ETL job by setting Status to the string "STOPPED".
// Fix: the original used the bare identifier STOPPED, which does not exist
// and would not compile — the provider expects the literal string.
// NOTE(review): the alicloud_log_project/alicloud_log_store references are
// docs-generator placeholders; replace them with real resource references.
using System.Collections.Generic;
using Pulumi;
using AliCloud = Pulumi.AliCloud;

return await Deployment.RunAsync(() => 
{
    var example = new AliCloud.Log.Etl("example", new()
    {
        // Valid values are "RUNNING" and "STOPPED"; "STOPPED" halts the job.
        Status = "STOPPED",
        EtlName = "etl_name",
        Project = alicloud_log_project.Example.Name,
        DisplayName = "display_name",
        Description = "etl_description",
        AccessKeyId = "access_key_id",
        AccessKeySecret = "access_key_secret",
        Script = "e_set('new','key')",
        Logstore = alicloud_log_store.Example.Name,
        EtlSinks = new[]
        {
            new AliCloud.Log.Inputs.EtlEtlSinkArgs
            {
                Name = "target_name",
                AccessKeyId = "example2_access_key_id",
                AccessKeySecret = "example2_access_key_secret",
                Endpoint = "cn-hangzhou.log.aliyuncs.com",
                Project = alicloud_log_project.Example.Name,
                Logstore = alicloud_log_store.Example2.Name,
            },
            new AliCloud.Log.Inputs.EtlEtlSinkArgs
            {
                Name = "target_name2",
                AccessKeyId = "example3_access_key_id",
                AccessKeySecret = "example3_access_key_secret",
                Endpoint = "cn-hangzhou.log.aliyuncs.com",
                Project = alicloud_log_project.Example.Name,
                Logstore = alicloud_log_store.Example3.Name,
            },
        },
    });

});
package main

import (
	"github.com/pulumi/pulumi-alicloud/sdk/v3/go/alicloud/log"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := log.NewEtl(ctx, "example", &log.EtlArgs{
			Status:          pulumi.Any(STOPPED),
			EtlName:         pulumi.String("etl_name"),
			Project:         pulumi.Any(alicloud_log_project.Example.Name),
			DisplayName:     pulumi.String("display_name"),
			Description:     pulumi.String("etl_description"),
			AccessKeyId:     pulumi.String("access_key_id"),
			AccessKeySecret: pulumi.String("access_key_secret"),
			Script:          pulumi.String("e_set('new','key')"),
			Logstore:        pulumi.Any(alicloud_log_store.Example.Name),
			EtlSinks: log.EtlEtlSinkArray{
				&log.EtlEtlSinkArgs{
					Name:            pulumi.String("target_name"),
					AccessKeyId:     pulumi.String("example2_access_key_id"),
					AccessKeySecret: pulumi.String("example2_access_key_secret"),
					Endpoint:        pulumi.String("cn-hangzhou.log.aliyuncs.com"),
					Project:         pulumi.Any(alicloud_log_project.Example.Name),
					Logstore:        pulumi.Any(alicloud_log_store.Example2.Name),
				},
				&log.EtlEtlSinkArgs{
					Name:            pulumi.String("target_name2"),
					AccessKeyId:     pulumi.String("example3_access_key_id"),
					AccessKeySecret: pulumi.String("example3_access_key_secret"),
					Endpoint:        pulumi.String("cn-hangzhou.log.aliyuncs.com"),
					Project:         pulumi.Any(alicloud_log_project.Example.Name),
					Logstore:        pulumi.Any(alicloud_log_store.Example3.Name),
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
// Stop a running ETL job by setting status to the string "STOPPED".
// Fix: the original used the bare identifier STOPPED, which does not exist
// and would not compile — the provider expects the literal string.
// NOTE(review): the alicloud_log_project/alicloud_log_store references are
// docs-generator placeholders; replace them with real resource references.
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.alicloud.log.Etl;
import com.pulumi.alicloud.log.EtlArgs;
import com.pulumi.alicloud.log.inputs.EtlEtlSinkArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var example = new Etl("example", EtlArgs.builder()
            // Valid values are "RUNNING" and "STOPPED"; "STOPPED" halts the job.
            .status("STOPPED")
            .etlName("etl_name")
            .project(alicloud_log_project.example().name())
            .displayName("display_name")
            .description("etl_description")
            .accessKeyId("access_key_id")
            .accessKeySecret("access_key_secret")
            .script("e_set('new','key')")
            .logstore(alicloud_log_store.example().name())
            .etlSinks(
                EtlEtlSinkArgs.builder()
                    .name("target_name")
                    .accessKeyId("example2_access_key_id")
                    .accessKeySecret("example2_access_key_secret")
                    .endpoint("cn-hangzhou.log.aliyuncs.com")
                    .project(alicloud_log_project.example().name())
                    .logstore(alicloud_log_store.example2().name())
                    .build(),
                EtlEtlSinkArgs.builder()
                    .name("target_name2")
                    .accessKeyId("example3_access_key_id")
                    .accessKeySecret("example3_access_key_secret")
                    .endpoint("cn-hangzhou.log.aliyuncs.com")
                    .project(alicloud_log_project.example().name())
                    .logstore(alicloud_log_store.example3().name())
                    .build())
            .build());

    }
}
# Stop a running ETL job by setting status to the string "STOPPED".
# Fix: the original used the bare name `stopped`, which is undefined and
# would raise NameError — the provider expects the literal string.
# NOTE(review): the alicloud_log_project/alicloud_log_store references are
# docs-generator placeholders; replace them with real resource references.
import pulumi
import pulumi_alicloud as alicloud

example = alicloud.log.Etl("example",
    # Valid values are "RUNNING" and "STOPPED"; "STOPPED" halts the job.
    status="STOPPED",
    etl_name="etl_name",
    project=alicloud_log_project["example"]["name"],
    display_name="display_name",
    description="etl_description",
    access_key_id="access_key_id",
    access_key_secret="access_key_secret",
    script="e_set('new','key')",
    logstore=alicloud_log_store["example"]["name"],
    etl_sinks=[
        alicloud.log.EtlEtlSinkArgs(
            name="target_name",
            access_key_id="example2_access_key_id",
            access_key_secret="example2_access_key_secret",
            endpoint="cn-hangzhou.log.aliyuncs.com",
            project=alicloud_log_project["example"]["name"],
            logstore=alicloud_log_store["example2"]["name"],
        ),
        alicloud.log.EtlEtlSinkArgs(
            name="target_name2",
            access_key_id="example3_access_key_id",
            access_key_secret="example3_access_key_secret",
            endpoint="cn-hangzhou.log.aliyuncs.com",
            project=alicloud_log_project["example"]["name"],
            logstore=alicloud_log_store["example3"]["name"],
        ),
    ])
// Stop a running ETL job by setting status to the string "STOPPED".
// Fix: the original used the bare identifier STOPPED, which is undefined
// and would not compile — the provider expects the literal string.
// NOTE(review): the alicloud_log_project/alicloud_log_store references are
// docs-generator placeholders; replace them with real resource references.
import * as pulumi from "@pulumi/pulumi";
import * as alicloud from "@pulumi/alicloud";

const example = new alicloud.log.Etl("example", {
    // Valid values are "RUNNING" and "STOPPED"; "STOPPED" halts the job.
    status: "STOPPED",
    etlName: "etl_name",
    project: alicloud_log_project.example.name,
    displayName: "display_name",
    description: "etl_description",
    accessKeyId: "access_key_id",
    accessKeySecret: "access_key_secret",
    script: "e_set('new','key')",
    logstore: alicloud_log_store.example.name,
    etlSinks: [
        {
            name: "target_name",
            accessKeyId: "example2_access_key_id",
            accessKeySecret: "example2_access_key_secret",
            endpoint: "cn-hangzhou.log.aliyuncs.com",
            project: alicloud_log_project.example.name,
            logstore: alicloud_log_store.example2.name,
        },
        {
            name: "target_name2",
            accessKeyId: "example3_access_key_id",
            accessKeySecret: "example3_access_key_secret",
            endpoint: "cn-hangzhou.log.aliyuncs.com",
            project: alicloud_log_project.example.name,
            logstore: alicloud_log_store.example3.name,
        },
    ],
});
# Stop a running ETL job by setting status to the string STOPPED.
# Fix: the original used ${STOPPED}, which interpolates a nonexistent
# variable — the provider expects the plain string value.
resources:
  example:
    type: alicloud:log:Etl
    properties:
      # Valid values are RUNNING and STOPPED; STOPPED halts the job.
      status: STOPPED
      etlName: etl_name
      project: ${alicloud_log_project.example.name}
      displayName: display_name
      description: etl_description
      accessKeyId: access_key_id
      accessKeySecret: access_key_secret
      script: e_set('new','key')
      logstore: ${alicloud_log_store.example.name}
      etlSinks:
        - name: target_name
          accessKeyId: example2_access_key_id
          accessKeySecret: example2_access_key_secret
          endpoint: cn-hangzhou.log.aliyuncs.com
          project: ${alicloud_log_project.example.name}
          logstore: ${alicloud_log_store.example2.name}
        - name: target_name2
          accessKeyId: example3_access_key_id
          accessKeySecret: example3_access_key_secret
          endpoint: cn-hangzhou.log.aliyuncs.com
          project: ${alicloud_log_project.example.name}
          logstore: ${alicloud_log_store.example3.name}

Restart the stopped task

// Restart a stopped ETL job by setting Status to the string "RUNNING".
// Fix: the original used the bare identifier RUNNING, which does not exist
// and would not compile — the provider expects the literal string.
// NOTE(review): the alicloud_log_project/alicloud_log_store references are
// docs-generator placeholders; replace them with real resource references.
using System.Collections.Generic;
using Pulumi;
using AliCloud = Pulumi.AliCloud;

return await Deployment.RunAsync(() => 
{
    var example = new AliCloud.Log.Etl("example", new()
    {
        // Valid values are "RUNNING" and "STOPPED"; "RUNNING" resumes the job.
        Status = "RUNNING",
        EtlName = "etl_name",
        Project = alicloud_log_project.Example.Name,
        DisplayName = "display_name",
        Description = "etl_description",
        AccessKeyId = "access_key_id",
        AccessKeySecret = "access_key_secret",
        Script = "e_set('new','key')",
        Logstore = alicloud_log_store.Example.Name,
        EtlSinks = new[]
        {
            new AliCloud.Log.Inputs.EtlEtlSinkArgs
            {
                Name = "target_name",
                AccessKeyId = "example2_access_key_id",
                AccessKeySecret = "example2_access_key_secret",
                Endpoint = "cn-hangzhou.log.aliyuncs.com",
                Project = alicloud_log_project.Example.Name,
                Logstore = alicloud_log_store.Example2.Name,
            },
            new AliCloud.Log.Inputs.EtlEtlSinkArgs
            {
                Name = "target_name2",
                AccessKeyId = "example3_access_key_id",
                AccessKeySecret = "example3_access_key_secret",
                Endpoint = "cn-hangzhou.log.aliyuncs.com",
                Project = alicloud_log_project.Example.Name,
                Logstore = alicloud_log_store.Example3.Name,
            },
        },
    });

});
package main

import (
	"github.com/pulumi/pulumi-alicloud/sdk/v3/go/alicloud/log"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := log.NewEtl(ctx, "example", &log.EtlArgs{
			Status:          pulumi.Any(RUNNING),
			EtlName:         pulumi.String("etl_name"),
			Project:         pulumi.Any(alicloud_log_project.Example.Name),
			DisplayName:     pulumi.String("display_name"),
			Description:     pulumi.String("etl_description"),
			AccessKeyId:     pulumi.String("access_key_id"),
			AccessKeySecret: pulumi.String("access_key_secret"),
			Script:          pulumi.String("e_set('new','key')"),
			Logstore:        pulumi.Any(alicloud_log_store.Example.Name),
			EtlSinks: log.EtlEtlSinkArray{
				&log.EtlEtlSinkArgs{
					Name:            pulumi.String("target_name"),
					AccessKeyId:     pulumi.String("example2_access_key_id"),
					AccessKeySecret: pulumi.String("example2_access_key_secret"),
					Endpoint:        pulumi.String("cn-hangzhou.log.aliyuncs.com"),
					Project:         pulumi.Any(alicloud_log_project.Example.Name),
					Logstore:        pulumi.Any(alicloud_log_store.Example2.Name),
				},
				&log.EtlEtlSinkArgs{
					Name:            pulumi.String("target_name2"),
					AccessKeyId:     pulumi.String("example3_access_key_id"),
					AccessKeySecret: pulumi.String("example3_access_key_secret"),
					Endpoint:        pulumi.String("cn-hangzhou.log.aliyuncs.com"),
					Project:         pulumi.Any(alicloud_log_project.Example.Name),
					Logstore:        pulumi.Any(alicloud_log_store.Example3.Name),
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
// Restart a stopped ETL job by setting status to the string "RUNNING".
// Fix: the original used the bare identifier RUNNING, which does not exist
// and would not compile — the provider expects the literal string.
// NOTE(review): the alicloud_log_project/alicloud_log_store references are
// docs-generator placeholders; replace them with real resource references.
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.alicloud.log.Etl;
import com.pulumi.alicloud.log.EtlArgs;
import com.pulumi.alicloud.log.inputs.EtlEtlSinkArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var example = new Etl("example", EtlArgs.builder()
            // Valid values are "RUNNING" and "STOPPED"; "RUNNING" resumes the job.
            .status("RUNNING")
            .etlName("etl_name")
            .project(alicloud_log_project.example().name())
            .displayName("display_name")
            .description("etl_description")
            .accessKeyId("access_key_id")
            .accessKeySecret("access_key_secret")
            .script("e_set('new','key')")
            .logstore(alicloud_log_store.example().name())
            .etlSinks(
                EtlEtlSinkArgs.builder()
                    .name("target_name")
                    .accessKeyId("example2_access_key_id")
                    .accessKeySecret("example2_access_key_secret")
                    .endpoint("cn-hangzhou.log.aliyuncs.com")
                    .project(alicloud_log_project.example().name())
                    .logstore(alicloud_log_store.example2().name())
                    .build(),
                EtlEtlSinkArgs.builder()
                    .name("target_name2")
                    .accessKeyId("example3_access_key_id")
                    .accessKeySecret("example3_access_key_secret")
                    .endpoint("cn-hangzhou.log.aliyuncs.com")
                    .project(alicloud_log_project.example().name())
                    .logstore(alicloud_log_store.example3().name())
                    .build())
            .build());

    }
}
# Restart a stopped ETL job by setting status to the string "RUNNING".
# Fix: the original used the bare name `running`, which is undefined and
# would raise NameError — the provider expects the literal string.
# NOTE(review): the alicloud_log_project/alicloud_log_store references are
# docs-generator placeholders; replace them with real resource references.
import pulumi
import pulumi_alicloud as alicloud

example = alicloud.log.Etl("example",
    # Valid values are "RUNNING" and "STOPPED"; "RUNNING" resumes the job.
    status="RUNNING",
    etl_name="etl_name",
    project=alicloud_log_project["example"]["name"],
    display_name="display_name",
    description="etl_description",
    access_key_id="access_key_id",
    access_key_secret="access_key_secret",
    script="e_set('new','key')",
    logstore=alicloud_log_store["example"]["name"],
    etl_sinks=[
        alicloud.log.EtlEtlSinkArgs(
            name="target_name",
            access_key_id="example2_access_key_id",
            access_key_secret="example2_access_key_secret",
            endpoint="cn-hangzhou.log.aliyuncs.com",
            project=alicloud_log_project["example"]["name"],
            logstore=alicloud_log_store["example2"]["name"],
        ),
        alicloud.log.EtlEtlSinkArgs(
            name="target_name2",
            access_key_id="example3_access_key_id",
            access_key_secret="example3_access_key_secret",
            endpoint="cn-hangzhou.log.aliyuncs.com",
            project=alicloud_log_project["example"]["name"],
            logstore=alicloud_log_store["example3"]["name"],
        ),
    ])
// Restart a stopped ETL job by setting status to the string "RUNNING".
// Fix: the original used the bare identifier RUNNING, which is undefined
// and would not compile — the provider expects the literal string.
// NOTE(review): the alicloud_log_project/alicloud_log_store references are
// docs-generator placeholders; replace them with real resource references.
import * as pulumi from "@pulumi/pulumi";
import * as alicloud from "@pulumi/alicloud";

const example = new alicloud.log.Etl("example", {
    // Valid values are "RUNNING" and "STOPPED"; "RUNNING" resumes the job.
    status: "RUNNING",
    etlName: "etl_name",
    project: alicloud_log_project.example.name,
    displayName: "display_name",
    description: "etl_description",
    accessKeyId: "access_key_id",
    accessKeySecret: "access_key_secret",
    script: "e_set('new','key')",
    logstore: alicloud_log_store.example.name,
    etlSinks: [
        {
            name: "target_name",
            accessKeyId: "example2_access_key_id",
            accessKeySecret: "example2_access_key_secret",
            endpoint: "cn-hangzhou.log.aliyuncs.com",
            project: alicloud_log_project.example.name,
            logstore: alicloud_log_store.example2.name,
        },
        {
            name: "target_name2",
            accessKeyId: "example3_access_key_id",
            accessKeySecret: "example3_access_key_secret",
            endpoint: "cn-hangzhou.log.aliyuncs.com",
            project: alicloud_log_project.example.name,
            logstore: alicloud_log_store.example3.name,
        },
    ],
});
# Restart a stopped ETL job by setting status to the string RUNNING.
# Fix: the original used ${RUNNING}, which interpolates a nonexistent
# variable — the provider expects the plain string value.
resources:
  example:
    type: alicloud:log:Etl
    properties:
      # Valid values are RUNNING and STOPPED; RUNNING resumes the job.
      status: RUNNING
      etlName: etl_name
      project: ${alicloud_log_project.example.name}
      displayName: display_name
      description: etl_description
      accessKeyId: access_key_id
      accessKeySecret: access_key_secret
      script: e_set('new','key')
      logstore: ${alicloud_log_store.example.name}
      etlSinks:
        - name: target_name
          accessKeyId: example2_access_key_id
          accessKeySecret: example2_access_key_secret
          endpoint: cn-hangzhou.log.aliyuncs.com
          project: ${alicloud_log_project.example.name}
          logstore: ${alicloud_log_store.example2.name}
        - name: target_name2
          accessKeyId: example3_access_key_id
          accessKeySecret: example3_access_key_secret
          endpoint: cn-hangzhou.log.aliyuncs.com
          project: ${alicloud_log_project.example.name}
          logstore: ${alicloud_log_store.example3.name}

Create Etl Resource

new Etl(name: string, args: EtlArgs, opts?: CustomResourceOptions);
@overload
def Etl(resource_name: str,
        opts: Optional[ResourceOptions] = None,
        access_key_id: Optional[str] = None,
        access_key_secret: Optional[str] = None,
        create_time: Optional[int] = None,
        description: Optional[str] = None,
        display_name: Optional[str] = None,
        etl_name: Optional[str] = None,
        etl_sinks: Optional[Sequence[EtlEtlSinkArgs]] = None,
        etl_type: Optional[str] = None,
        from_time: Optional[int] = None,
        kms_encrypted_access_key_id: Optional[str] = None,
        kms_encrypted_access_key_secret: Optional[str] = None,
        kms_encryption_access_key_id_context: Optional[Mapping[str, Any]] = None,
        kms_encryption_access_key_secret_context: Optional[Mapping[str, Any]] = None,
        last_modified_time: Optional[int] = None,
        logstore: Optional[str] = None,
        parameters: Optional[Mapping[str, str]] = None,
        project: Optional[str] = None,
        role_arn: Optional[str] = None,
        schedule: Optional[str] = None,
        script: Optional[str] = None,
        status: Optional[str] = None,
        to_time: Optional[int] = None,
        version: Optional[int] = None)
@overload
def Etl(resource_name: str,
        args: EtlArgs,
        opts: Optional[ResourceOptions] = None)
func NewEtl(ctx *Context, name string, args EtlArgs, opts ...ResourceOption) (*Etl, error)
public Etl(string name, EtlArgs args, CustomResourceOptions? opts = null)
public Etl(String name, EtlArgs args)
public Etl(String name, EtlArgs args, CustomResourceOptions options)
type: alicloud:log:Etl
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

name string
The unique name of the resource.
args EtlArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name str
The unique name of the resource.
args EtlArgs
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.
ctx Context
Context object for the current deployment.
name string
The unique name of the resource.
args EtlArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name string
The unique name of the resource.
args EtlArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
name String
The unique name of the resource.
args EtlArgs
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.

Etl Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

The Etl resource accepts the following input properties:

DisplayName string

Log service etl job alias.

EtlName string

The name of the log etl job.

EtlSinks List<Pulumi.AliCloud.Log.Inputs.EtlEtlSinkArgs>

Target logstore configuration for delivery after data processing.

Logstore string

Delivery target logstore.

Project string

The project where the target logstore is delivered.

Script string

Processing operation grammar.

AccessKeyId string

Delivery target logstore access key id.

AccessKeySecret string

Delivery target logstore access key secret.

CreateTime int

The etl job create time.

Description string

Description of the log etl job.

EtlType string

Log service etl type, the default value is ETL.

FromTime int

The start time of the processing job, if not set the value is 0, indicates to start processing from the oldest data.

KmsEncryptedAccessKeyId string

A KMS-encrypted access key ID used by the log etl job. If access_key_id is filled in, this field will be ignored.

KmsEncryptedAccessKeySecret string

A KMS-encrypted access key secret used by the log etl job. If access_key_secret is filled in, this field will be ignored.

KmsEncryptionAccessKeyIdContext Dictionary<string, object>

An KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.

KmsEncryptionAccessKeySecretContext Dictionary<string, object>

An KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.

LastModifiedTime int

ETL job last modified time.

Parameters Dictionary<string, string>

Advanced parameter configuration of processing operations.

RoleArn string

Sts role info under delivery target logstore. role_arn and (access_key_id, access_key_secret) fill in at most one. If you do not fill in both, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to use KMS to get the key pair.

Schedule string

Job scheduling type, the default value is Resident.

Status string

Log etl job status. The default value is RUNNING. Only 4 values are supported: STARTING, RUNNING, STOPPING, STOPPED.

ToTime int

Deadline of processing job, if not set the value is 0, indicates that new data will be processed continuously.

Version int

Log etl job version. the default value is 2.

DisplayName string

Log service etl job alias.

EtlName string

The name of the log etl job.

EtlSinks []EtlEtlSinkArgs

Target logstore configuration for delivery after data processing.

Logstore string

Delivery target logstore.

Project string

The project where the target logstore is delivered.

Script string

Processing operation grammar.

AccessKeyId string

Delivery target logstore access key id.

AccessKeySecret string

Delivery target logstore access key secret.

CreateTime int

The etl job create time.

Description string

Description of the log etl job.

EtlType string

Log service etl type, the default value is ETL.

FromTime int

The start time of the processing job, if not set the value is 0, indicates to start processing from the oldest data.

KmsEncryptedAccessKeyId string

A KMS-encrypted access key ID used by the log etl job. If access_key_id is filled in, this field will be ignored.

KmsEncryptedAccessKeySecret string

A KMS-encrypted access key secret used by the log etl job. If access_key_secret is filled in, this field will be ignored.

KmsEncryptionAccessKeyIdContext map[string]interface{}

An KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.

KmsEncryptionAccessKeySecretContext map[string]interface{}

An KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.

LastModifiedTime int

ETL job last modified time.

Parameters map[string]string

Advanced parameter configuration of processing operations.

RoleArn string

Sts role info under delivery target logstore. role_arn and (access_key_id, access_key_secret) fill in at most one. If you do not fill in both, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to use KMS to get the key pair.

Schedule string

Job scheduling type, the default value is Resident.

Status string

Log etl job status. The default value is RUNNING. Only 4 values are supported: STARTING, RUNNING, STOPPING, STOPPED.

ToTime int

Deadline of processing job, if not set the value is 0, indicates that new data will be processed continuously.

Version int

Log etl job version. the default value is 2.

displayName String

Log service etl job alias.

etlName String

The name of the log etl job.

etlSinks List<EtlEtlSinkArgs>

Target logstore configuration for delivery after data processing.

logstore String

Delivery target logstore.

project String

The project where the target logstore is delivered.

script String

Processing operation grammar.

accessKeyId String

Delivery target logstore access key id.

accessKeySecret String

Delivery target logstore access key secret.

createTime Integer

The etl job create time.

description String

Description of the log etl job.

etlType String

Log service etl type, the default value is ETL.

fromTime Integer

The start time of the processing job, if not set the value is 0, indicates to start processing from the oldest data.

kmsEncryptedAccessKeyId String

A KMS-encrypted access key ID used by the log etl job. If access_key_id is filled in, this field will be ignored.

kmsEncryptedAccessKeySecret String

A KMS-encrypted access key secret used by the log etl job. If access_key_secret is filled in, this field will be ignored.

kmsEncryptionAccessKeyIdContext Map<String,Object>

An KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.

kmsEncryptionAccessKeySecretContext Map<String,Object>

An KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.

lastModifiedTime Integer

ETL job last modified time.

parameters Map<String,String>

Advanced parameter configuration of processing operations.

roleArn String

Sts role info under delivery target logstore. role_arn and (access_key_id, access_key_secret) fill in at most one. If you do not fill in both, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to use KMS to get the key pair.

schedule String

Job scheduling type, the default value is Resident.

status String

Log etl job status. The default value is RUNNING. Only 4 values are supported: STARTING, RUNNING, STOPPING, STOPPED.

toTime Integer

Deadline of processing job, if not set the value is 0, indicates that new data will be processed continuously.

version Integer

Log etl job version. the default value is 2.

displayName string

Log service etl job alias.

etlName string

The name of the log etl job.

etlSinks EtlEtlSinkArgs[]

Target logstore configuration for delivery after data processing.

logstore string

Delivery target logstore.

project string

The project where the target logstore is delivered.

script string

Processing operation grammar.

accessKeyId string

Delivery target logstore access key id.

accessKeySecret string

Delivery target logstore access key secret.

createTime number

The etl job create time.

description string

Description of the log etl job.

etlType string

Log service etl type, the default value is ETL.

fromTime number

The start time of the processing job, if not set the value is 0, indicates to start processing from the oldest data.

kmsEncryptedAccessKeyId string

A KMS-encrypted access key ID used by the log etl job. If access_key_id is filled in, this field will be ignored.

kmsEncryptedAccessKeySecret string

A KMS-encrypted access key secret used by the log etl job. If access_key_secret is filled in, this field will be ignored.

kmsEncryptionAccessKeyIdContext {[key: string]: any}

An KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.

kmsEncryptionAccessKeySecretContext {[key: string]: any}

An KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.

lastModifiedTime number

ETL job last modified time.

parameters {[key: string]: string}

Advanced parameter configuration of processing operations.

roleArn string

Sts role info under delivery target logstore. role_arn and (access_key_id, access_key_secret) fill in at most one. If you do not fill in both, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to use KMS to get the key pair.

schedule string

Job scheduling type, the default value is Resident.

status string

Log etl job status. The default value is RUNNING. Only 4 values are supported: STARTING, RUNNING, STOPPING, STOPPED.

toTime number

Deadline of processing job, if not set the value is 0, indicates that new data will be processed continuously.

version number

Log etl job version. the default value is 2.

display_name str

Log service etl job alias.

etl_name str

The name of the log etl job.

etl_sinks Sequence[EtlEtlSinkArgs]

Target logstore configuration for delivery after data processing.

logstore str

Delivery target logstore.

project str

The project where the target logstore is delivered.

script str

Processing operation grammar.

access_key_id str

Delivery target logstore access key id.

access_key_secret str

Delivery target logstore access key secret.

create_time int

The etl job create time.

description str

Description of the log etl job.

etl_type str

Log service etl type, the default value is ETL.

from_time int

The start time of the processing job, if not set the value is 0, indicates to start processing from the oldest data.

kms_encrypted_access_key_id str

A KMS-encrypted access key ID used by the log etl job. If access_key_id is filled in, this field will be ignored.

kms_encrypted_access_key_secret str

A KMS-encrypted access key secret used by the log etl job. If access_key_secret is filled in, this field will be ignored.

kms_encryption_access_key_id_context Mapping[str, Any]

An KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.

kms_encryption_access_key_secret_context Mapping[str, Any]

An KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.

last_modified_time int

ETL job last modified time.

parameters Mapping[str, str]

Advanced parameter configuration of processing operations.

role_arn str

Sts role info under delivery target logstore. role_arn and (access_key_id, access_key_secret) fill in at most one. If you do not fill in both, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to use KMS to get the key pair.

schedule str

Job scheduling type, the default value is Resident.

status str

Log etl job status. The default value is RUNNING. Only 4 values are supported: STARTING, RUNNING, STOPPING, STOPPED.

to_time int

Deadline of processing job, if not set the value is 0, indicates that new data will be processed continuously.

version int

Log etl job version. the default value is 2.

displayName String

Log service etl job alias.

etlName String

The name of the log etl job.

etlSinks List<Property Map>

Target logstore configuration for delivery after data processing.

logstore String

Delivery target logstore.

project String

The project where the target logstore is delivered.

script String

Processing operation grammar.

accessKeyId String

Delivery target logstore access key id.

accessKeySecret String

Delivery target logstore access key secret.

createTime Number

The etl job create time.

description String

Description of the log etl job.

etlType String

Log service etl type, the default value is ETL.

fromTime Number

The start time of the processing job, if not set the value is 0, indicates to start processing from the oldest data.

kmsEncryptedAccessKeyId String

A KMS-encrypted access key ID used by the log etl job. If access_key_id is filled in, this field will be ignored.

kmsEncryptedAccessKeySecret String

A KMS-encrypted access key secret used by the log etl job. If access_key_secret is filled in, this field will be ignored.

kmsEncryptionAccessKeyIdContext Map<Any>

An KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.

kmsEncryptionAccessKeySecretContext Map<Any>

An KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.

lastModifiedTime Number

ETL job last modified time.

parameters Map<String>

Advanced parameter configuration of processing operations.

roleArn String

Sts role info under delivery target logstore. role_arn and (access_key_id, access_key_secret) fill in at most one. If you do not fill in both, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to use KMS to get the key pair.

schedule String

Job scheduling type, the default value is Resident.

status String

Log etl job status. The default value is RUNNING. Only 4 values are supported: STARTING, RUNNING, STOPPING, STOPPED.

toTime Number

Deadline of processing job, if not set the value is 0, indicates that new data will be processed continuously.

version Number

Log etl job version. the default value is 2.

Outputs

All input properties are implicitly available as output properties. Additionally, the Etl resource produces the following output properties:

Id string

The provider-assigned unique ID for this managed resource.

Id string

The provider-assigned unique ID for this managed resource.

id String

The provider-assigned unique ID for this managed resource.

id string

The provider-assigned unique ID for this managed resource.

id str

The provider-assigned unique ID for this managed resource.

id String

The provider-assigned unique ID for this managed resource.

Look up Existing Etl Resource

Get an existing Etl resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: EtlState, opts?: CustomResourceOptions): Etl
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        access_key_id: Optional[str] = None,
        access_key_secret: Optional[str] = None,
        create_time: Optional[int] = None,
        description: Optional[str] = None,
        display_name: Optional[str] = None,
        etl_name: Optional[str] = None,
        etl_sinks: Optional[Sequence[EtlEtlSinkArgs]] = None,
        etl_type: Optional[str] = None,
        from_time: Optional[int] = None,
        kms_encrypted_access_key_id: Optional[str] = None,
        kms_encrypted_access_key_secret: Optional[str] = None,
        kms_encryption_access_key_id_context: Optional[Mapping[str, Any]] = None,
        kms_encryption_access_key_secret_context: Optional[Mapping[str, Any]] = None,
        last_modified_time: Optional[int] = None,
        logstore: Optional[str] = None,
        parameters: Optional[Mapping[str, str]] = None,
        project: Optional[str] = None,
        role_arn: Optional[str] = None,
        schedule: Optional[str] = None,
        script: Optional[str] = None,
        status: Optional[str] = None,
        to_time: Optional[int] = None,
        version: Optional[int] = None) -> Etl
func GetEtl(ctx *Context, name string, id IDInput, state *EtlState, opts ...ResourceOption) (*Etl, error)
public static Etl Get(string name, Input<string> id, EtlState? state, CustomResourceOptions? opts = null)
public static Etl get(String name, Output<String> id, EtlState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
The following state arguments are supported:
AccessKeyId string

Delivery target logstore access key id.

AccessKeySecret string

Delivery target logstore access key secret.

CreateTime int

The etl job create time.

Description string

Description of the log etl job.

DisplayName string

Log service etl job alias.

EtlName string

The name of the log etl job.

EtlSinks List<Pulumi.AliCloud.Log.Inputs.EtlEtlSinkArgs>

Target logstore configuration for delivery after data processing.

EtlType string

Log service etl type, the default value is ETL.

FromTime int

The start time of the processing job, if not set the value is 0, indicates to start processing from the oldest data.

KmsEncryptedAccessKeyId string

A KMS-encrypted access key ID used by the log etl job. If access_key_id is filled in, this field will be ignored.

KmsEncryptedAccessKeySecret string

A KMS-encrypted access key secret used by the log etl job. If access_key_secret is filled in, this field will be ignored.

KmsEncryptionAccessKeyIdContext Dictionary<string, object>

An KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.

KmsEncryptionAccessKeySecretContext Dictionary<string, object>

An KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.

LastModifiedTime int

ETL job last modified time.

Logstore string

Delivery target logstore.

Parameters Dictionary<string, string>

Advanced parameter configuration of processing operations.

Project string

The project where the target logstore is delivered.

RoleArn string

Sts role info under delivery target logstore. role_arn and (access_key_id, access_key_secret) fill in at most one. If you do not fill in both, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to use KMS to get the key pair.

Schedule string

Job scheduling type, the default value is Resident.

Script string

Processing operation grammar.

Status string

Log etl job status. The default value is RUNNING. Only 4 values are supported: STARTING, RUNNING, STOPPING, STOPPED.

ToTime int

Deadline of processing job, if not set the value is 0, indicates that new data will be processed continuously.

Version int

Log etl job version. the default value is 2.

AccessKeyId string

Delivery target logstore access key id.

AccessKeySecret string

Delivery target logstore access key secret.

CreateTime int

The etl job create time.

Description string

Description of the log etl job.

DisplayName string

Log service etl job alias.

EtlName string

The name of the log etl job.

EtlSinks []EtlEtlSinkArgs

Target logstore configuration for delivery after data processing.

EtlType string

Log service etl type, the default value is ETL.

FromTime int

The start time of the processing job, if not set the value is 0, indicates to start processing from the oldest data.

KmsEncryptedAccessKeyId string

A KMS-encrypted access key ID used by the log etl job. If access_key_id is filled in, this field will be ignored.

KmsEncryptedAccessKeySecret string

A KMS-encrypted access key secret used by the log etl job. If access_key_secret is filled in, this field will be ignored.

KmsEncryptionAccessKeyIdContext map[string]interface{}

An KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.

KmsEncryptionAccessKeySecretContext map[string]interface{}

An KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.

LastModifiedTime int

ETL job last modified time.

Logstore string

Delivery target logstore.

Parameters map[string]string

Advanced parameter configuration of processing operations.

Project string

The project where the target logstore is delivered.

RoleArn string

Sts role info under delivery target logstore. role_arn and (access_key_id, access_key_secret) fill in at most one. If you do not fill in both, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to use KMS to get the key pair.

Schedule string

Job scheduling type, the default value is Resident.

Script string

Processing operation grammar.

Status string

Log etl job status. The default value is RUNNING. Only 4 values are supported: STARTING, RUNNING, STOPPING, STOPPED.

ToTime int

Deadline of processing job, if not set the value is 0, indicates that new data will be processed continuously.

Version int

Log etl job version. the default value is 2.

accessKeyId String

Delivery target logstore access key id.

accessKeySecret String

Delivery target logstore access key secret.

createTime Integer

The etl job create time.

description String

Description of the log etl job.

displayName String

Log service etl job alias.

etlName String

The name of the log etl job.

etlSinks List<EtlEtlSinkArgs>

Target logstore configuration for delivery after data processing.

etlType String

Log service etl type, the default value is ETL.

fromTime Integer

The start time of the processing job, if not set the value is 0, indicates to start processing from the oldest data.

kmsEncryptedAccessKeyId String

A KMS-encrypted access key ID used by the log etl job. If access_key_id is filled in, this field will be ignored.

kmsEncryptedAccessKeySecret String

A KMS-encrypted access key secret used by the log etl job. If access_key_secret is filled in, this field will be ignored.

kmsEncryptionAccessKeyIdContext Map<String,Object>

An KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.

kmsEncryptionAccessKeySecretContext Map<String,Object>

An KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.

lastModifiedTime Integer

ETL job last modified time.

logstore String

Delivery target logstore.

parameters Map<String,String>

Advanced parameter configuration of processing operations.

project String

The project where the target logstore is delivered.

roleArn String

Sts role info under delivery target logstore. role_arn and (access_key_id, access_key_secret) fill in at most one. If you do not fill in both, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to use KMS to get the key pair.

schedule String

Job scheduling type, the default value is Resident.

script String

Processing operation grammar.

status String

Log etl job status. The default value is RUNNING. Only 4 values are supported: STARTING, RUNNING, STOPPING, STOPPED.

toTime Integer

Deadline of processing job, if not set the value is 0, indicates that new data will be processed continuously.

version Integer

Log etl job version. the default value is 2.

accessKeyId string

Delivery target logstore access key id.

accessKeySecret string

Delivery target logstore access key secret.

createTime number

The etl job create time.

description string

Description of the log etl job.

displayName string

Log service etl job alias.

etlName string

The name of the log etl job.

etlSinks EtlEtlSinkArgs[]

Target logstore configuration for delivery after data processing.

etlType string

Log service etl type, the default value is ETL.

fromTime number

The start time of the processing job, if not set the value is 0, indicates to start processing from the oldest data.

kmsEncryptedAccessKeyId string

A KMS-encrypted access key ID used by the log etl job. If access_key_id is filled in, this field will be ignored.

kmsEncryptedAccessKeySecret string

A KMS-encrypted access key secret used by the log etl job. If access_key_secret is filled in, this field will be ignored.

kmsEncryptionAccessKeyIdContext {[key: string]: any}

An KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.

kmsEncryptionAccessKeySecretContext {[key: string]: any}

An KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.

lastModifiedTime number

ETL job last modified time.

logstore string

Delivery target logstore.

parameters {[key: string]: string}

Advanced parameter configuration of processing operations.

project string

The project where the target logstore is delivered.

roleArn string

Sts role info under delivery target logstore. role_arn and (access_key_id, access_key_secret) fill in at most one. If you do not fill in both, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to use KMS to get the key pair.

schedule string

Job scheduling type, the default value is Resident.

script string

Processing operation grammar.

status string

Log etl job status. The default value is RUNNING. Only 4 values are supported: STARTING, RUNNING, STOPPING, STOPPED.

toTime number

Deadline of processing job, if not set the value is 0, indicates that new data will be processed continuously.

version number

Log etl job version. the default value is 2.

access_key_id str

Delivery target logstore access key id.

access_key_secret str

Delivery target logstore access key secret.

create_time int

The etl job create time.

description str

Description of the log etl job.

display_name str

Log service etl job alias.

etl_name str

The name of the log etl job.

etl_sinks Sequence[EtlEtlSinkArgs]

Target logstore configuration for delivery after data processing.

etl_type str

Log service etl type, the default value is ETL.

from_time int

The start time of the processing job. If not set, the value is 0, which indicates that processing starts from the oldest data.

kms_encrypted_access_key_id str

A KMS-encrypted access key ID used for the log etl job. If the access_key_id is filled in, this field will be ignored.

kms_encrypted_access_key_secret str

A KMS-encrypted access key secret used for the log etl job. If the access_key_secret is filled in, this field will be ignored.

kms_encryption_access_key_id_context Mapping[str, Any]

A KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid when kms_encrypted_access_key_id is set. When it is changed, the instance will reboot to make the change take effect.

kms_encryption_access_key_secret_context Mapping[str, Any]

A KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid when kms_encrypted_access_key_secret is set. When it is changed, the instance will reboot to make the change take effect.

last_modified_time int

ETL job last modified time.

logstore str

Delivery target logstore.

parameters Mapping[str, str]

Advanced parameter configuration of processing operations.

project str

The project where the target logstore is delivered.

role_arn str

STS role info under the delivery target logstore. Fill in at most one of role_arn and (access_key_id, access_key_secret). If you fill in neither, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to use KMS to get the key pair.

schedule str

Job scheduling type, the default value is Resident.

script str

Processing operation grammar.

status str

The status of the log ETL job. The default value is RUNNING. Only 4 values are supported: STARTING, RUNNING, STOPPING, STOPPED.

to_time int

Deadline of the processing job. If not set, the value is 0, which indicates that new data will be processed continuously.

version int

Log ETL job version. The default value is 2.

accessKeyId String

Delivery target logstore access key id.

accessKeySecret String

Delivery target logstore access key secret.

createTime Number

The etl job create time.

description String

Description of the log etl job.

displayName String

Log service etl job alias.

etlName String

The name of the log etl job.

etlSinks List<Property Map>

Target logstore configuration for delivery after data processing.

etlType String

Log service etl type, the default value is ETL.

fromTime Number

The start time of the processing job. If not set, the value is 0, which indicates that processing starts from the oldest data.

kmsEncryptedAccessKeyId String

A KMS-encrypted access key ID used for the log etl job. If the access_key_id is filled in, this field will be ignored.

kmsEncryptedAccessKeySecret String

A KMS-encrypted access key secret used for the log etl job. If the access_key_secret is filled in, this field will be ignored.

kmsEncryptionAccessKeyIdContext Map<Any>

A KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid when kms_encrypted_access_key_id is set. When it is changed, the instance will reboot to make the change take effect.

kmsEncryptionAccessKeySecretContext Map<Any>

A KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid when kms_encrypted_access_key_secret is set. When it is changed, the instance will reboot to make the change take effect.

lastModifiedTime Number

ETL job last modified time.

logstore String

Delivery target logstore.

parameters Map<String>

Advanced parameter configuration of processing operations.

project String

The project where the target logstore is delivered.

roleArn String

STS role info under the delivery target logstore. Fill in at most one of role_arn and (access_key_id, access_key_secret). If you fill in neither, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to use KMS to get the key pair.

schedule String

Job scheduling type, the default value is Resident.

script String

Processing operation grammar.

status String

The status of the log ETL job. The default value is RUNNING. Only 4 values are supported: STARTING, RUNNING, STOPPING, STOPPED.

toTime Number

Deadline of the processing job. If not set, the value is 0, which indicates that new data will be processed continuously.

version Number

Log ETL job version. The default value is 2.

Supporting Types

EtlEtlSink

Endpoint string

Delivery target logstore region.

Logstore string

Delivery target logstore.

Name string

Delivery target name.

Project string

The project where the target logstore is delivered.

AccessKeyId string

Delivery target logstore access key id.

AccessKeySecret string

Delivery target logstore access key secret.

KmsEncryptedAccessKeyId string

A KMS-encrypted access key ID used for the log etl job. If the access_key_id is filled in, this field will be ignored.

KmsEncryptedAccessKeySecret string

A KMS-encrypted access key secret used for the log etl job. If the access_key_secret is filled in, this field will be ignored.

RoleArn string

STS role info under the delivery target logstore. Fill in at most one of role_arn and (access_key_id, access_key_secret). If you fill in neither, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to use KMS to get the key pair.

Type string

ETL sinks type, the default value is AliyunLOG.

Endpoint string

Delivery target logstore region.

Logstore string

Delivery target logstore.

Name string

Delivery target name.

Project string

The project where the target logstore is delivered.

AccessKeyId string

Delivery target logstore access key id.

AccessKeySecret string

Delivery target logstore access key secret.

KmsEncryptedAccessKeyId string

A KMS-encrypted access key ID used for the log etl job. If the access_key_id is filled in, this field will be ignored.

KmsEncryptedAccessKeySecret string

A KMS-encrypted access key secret used for the log etl job. If the access_key_secret is filled in, this field will be ignored.

RoleArn string

STS role info under the delivery target logstore. Fill in at most one of role_arn and (access_key_id, access_key_secret). If you fill in neither, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to use KMS to get the key pair.

Type string

ETL sinks type, the default value is AliyunLOG.

endpoint String

Delivery target logstore region.

logstore String

Delivery target logstore.

name String

Delivery target name.

project String

The project where the target logstore is delivered.

accessKeyId String

Delivery target logstore access key id.

accessKeySecret String

Delivery target logstore access key secret.

kmsEncryptedAccessKeyId String

A KMS-encrypted access key ID used for the log etl job. If the access_key_id is filled in, this field will be ignored.

kmsEncryptedAccessKeySecret String

A KMS-encrypted access key secret used for the log etl job. If the access_key_secret is filled in, this field will be ignored.

roleArn String

STS role info under the delivery target logstore. Fill in at most one of role_arn and (access_key_id, access_key_secret). If you fill in neither, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to use KMS to get the key pair.

type String

ETL sinks type, the default value is AliyunLOG.

endpoint string

Delivery target logstore region.

logstore string

Delivery target logstore.

name string

Delivery target name.

project string

The project where the target logstore is delivered.

accessKeyId string

Delivery target logstore access key id.

accessKeySecret string

Delivery target logstore access key secret.

kmsEncryptedAccessKeyId string

A KMS-encrypted access key ID used for the log etl job. If the access_key_id is filled in, this field will be ignored.

kmsEncryptedAccessKeySecret string

A KMS-encrypted access key secret used for the log etl job. If the access_key_secret is filled in, this field will be ignored.

roleArn string

STS role info under the delivery target logstore. Fill in at most one of role_arn and (access_key_id, access_key_secret). If you fill in neither, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to use KMS to get the key pair.

type string

ETL sinks type, the default value is AliyunLOG.

endpoint str

Delivery target logstore region.

logstore str

Delivery target logstore.

name str

Delivery target name.

project str

The project where the target logstore is delivered.

access_key_id str

Delivery target logstore access key id.

access_key_secret str

Delivery target logstore access key secret.

kms_encrypted_access_key_id str

A KMS-encrypted access key ID used for the log etl job. If the access_key_id is filled in, this field will be ignored.

kms_encrypted_access_key_secret str

A KMS-encrypted access key secret used for the log etl job. If the access_key_secret is filled in, this field will be ignored.

role_arn str

STS role info under the delivery target logstore. Fill in at most one of role_arn and (access_key_id, access_key_secret). If you fill in neither, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to use KMS to get the key pair.

type str

ETL sinks type, the default value is AliyunLOG.

endpoint String

Delivery target logstore region.

logstore String

Delivery target logstore.

name String

Delivery target name.

project String

The project where the target logstore is delivered.

accessKeyId String

Delivery target logstore access key id.

accessKeySecret String

Delivery target logstore access key secret.

kmsEncryptedAccessKeyId String

A KMS-encrypted access key ID used for the log etl job. If the access_key_id is filled in, this field will be ignored.

kmsEncryptedAccessKeySecret String

A KMS-encrypted access key secret used for the log etl job. If the access_key_secret is filled in, this field will be ignored.

roleArn String

STS role info under the delivery target logstore. Fill in at most one of role_arn and (access_key_id, access_key_secret). If you fill in neither, then you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to use KMS to get the key pair.

type String

ETL sinks type, the default value is AliyunLOG.

Import

Log etl can be imported using the id, e.g.

 $ pulumi import alicloud:log/etl:Etl example tf-log-project:tf-log-etl-name

Package Details

Repository
Alibaba Cloud pulumi/pulumi-alicloud
License
Apache-2.0
Notes

This Pulumi package is based on the alicloud Terraform Provider.