alicloud.log.Etl
Log Service data transformation (ETL) is a hosted, highly available, and scalable data processing service that is widely used for data normalization, enrichment, distribution, aggregation, and index rebuilding. Refer to the Alibaba Cloud Log Service documentation for details.
NOTE: Available since v1.120.0.
Example Usage
Basic Usage
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using AliCloud = Pulumi.AliCloud;
using Random = Pulumi.Random;
return await Deployment.RunAsync(() =>
{
var @default = new Random.RandomInteger("default", new()
{
Max = 99999,
Min = 10000,
});
var exampleProject = new AliCloud.Log.Project("exampleProject", new()
{
Description = "terraform-example",
});
var exampleStore = new AliCloud.Log.Store("exampleStore", new()
{
Project = exampleProject.Name,
RetentionPeriod = 3650,
ShardCount = 3,
AutoSplit = true,
MaxSplitShardCount = 60,
AppendMeta = true,
});
var example2 = new AliCloud.Log.Store("example2", new()
{
Project = exampleProject.Name,
RetentionPeriod = 3650,
ShardCount = 3,
AutoSplit = true,
MaxSplitShardCount = 60,
AppendMeta = true,
});
var example3 = new AliCloud.Log.Store("example3", new()
{
Project = exampleProject.Name,
RetentionPeriod = 3650,
ShardCount = 3,
AutoSplit = true,
MaxSplitShardCount = 60,
AppendMeta = true,
});
var exampleEtl = new AliCloud.Log.Etl("exampleEtl", new()
{
EtlName = "terraform-example",
Project = exampleProject.Name,
DisplayName = "terraform-example",
Description = "terraform-example",
AccessKeyId = "access_key_id",
AccessKeySecret = "access_key_secret",
Script = "e_set('new','key')",
Logstore = exampleStore.Name,
EtlSinks = new[]
{
new AliCloud.Log.Inputs.EtlEtlSinkArgs
{
Name = "target_name",
AccessKeyId = "example2_access_key_id",
AccessKeySecret = "example2_access_key_secret",
Endpoint = "cn-hangzhou.log.aliyuncs.com",
Project = exampleProject.Name,
Logstore = example2.Name,
},
new AliCloud.Log.Inputs.EtlEtlSinkArgs
{
Name = "target_name2",
AccessKeyId = "example3_access_key_id",
AccessKeySecret = "example3_access_key_secret",
Endpoint = "cn-hangzhou.log.aliyuncs.com",
Project = exampleProject.Name,
Logstore = example3.Name,
},
},
});
});
package main
import (
"github.com/pulumi/pulumi-alicloud/sdk/v3/go/alicloud/log"
"github.com/pulumi/pulumi-random/sdk/v4/go/random"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := random.NewRandomInteger(ctx, "default", &random.RandomIntegerArgs{
Max: pulumi.Int(99999),
Min: pulumi.Int(10000),
})
if err != nil {
return err
}
exampleProject, err := log.NewProject(ctx, "exampleProject", &log.ProjectArgs{
Description: pulumi.String("terraform-example"),
})
if err != nil {
return err
}
exampleStore, err := log.NewStore(ctx, "exampleStore", &log.StoreArgs{
Project: exampleProject.Name,
RetentionPeriod: pulumi.Int(3650),
ShardCount: pulumi.Int(3),
AutoSplit: pulumi.Bool(true),
MaxSplitShardCount: pulumi.Int(60),
AppendMeta: pulumi.Bool(true),
})
if err != nil {
return err
}
example2, err := log.NewStore(ctx, "example2", &log.StoreArgs{
Project: exampleProject.Name,
RetentionPeriod: pulumi.Int(3650),
ShardCount: pulumi.Int(3),
AutoSplit: pulumi.Bool(true),
MaxSplitShardCount: pulumi.Int(60),
AppendMeta: pulumi.Bool(true),
})
if err != nil {
return err
}
example3, err := log.NewStore(ctx, "example3", &log.StoreArgs{
Project: exampleProject.Name,
RetentionPeriod: pulumi.Int(3650),
ShardCount: pulumi.Int(3),
AutoSplit: pulumi.Bool(true),
MaxSplitShardCount: pulumi.Int(60),
AppendMeta: pulumi.Bool(true),
})
if err != nil {
return err
}
_, err = log.NewEtl(ctx, "exampleEtl", &log.EtlArgs{
EtlName: pulumi.String("terraform-example"),
Project: exampleProject.Name,
DisplayName: pulumi.String("terraform-example"),
Description: pulumi.String("terraform-example"),
AccessKeyId: pulumi.String("access_key_id"),
AccessKeySecret: pulumi.String("access_key_secret"),
Script: pulumi.String("e_set('new','key')"),
Logstore: exampleStore.Name,
EtlSinks: log.EtlEtlSinkArray{
&log.EtlEtlSinkArgs{
Name: pulumi.String("target_name"),
AccessKeyId: pulumi.String("example2_access_key_id"),
AccessKeySecret: pulumi.String("example2_access_key_secret"),
Endpoint: pulumi.String("cn-hangzhou.log.aliyuncs.com"),
Project: exampleProject.Name,
Logstore: example2.Name,
},
&log.EtlEtlSinkArgs{
Name: pulumi.String("target_name2"),
AccessKeyId: pulumi.String("example3_access_key_id"),
AccessKeySecret: pulumi.String("example3_access_key_secret"),
Endpoint: pulumi.String("cn-hangzhou.log.aliyuncs.com"),
Project: exampleProject.Name,
Logstore: example3.Name,
},
},
})
if err != nil {
return err
}
return nil
})
}
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.random.RandomInteger;
import com.pulumi.random.RandomIntegerArgs;
import com.pulumi.alicloud.log.Project;
import com.pulumi.alicloud.log.ProjectArgs;
import com.pulumi.alicloud.log.Store;
import com.pulumi.alicloud.log.StoreArgs;
import com.pulumi.alicloud.log.Etl;
import com.pulumi.alicloud.log.EtlArgs;
import com.pulumi.alicloud.log.inputs.EtlEtlSinkArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var default_ = new RandomInteger("default", RandomIntegerArgs.builder()
.max(99999)
.min(10000)
.build());
var exampleProject = new Project("exampleProject", ProjectArgs.builder()
.description("terraform-example")
.build());
var exampleStore = new Store("exampleStore", StoreArgs.builder()
.project(exampleProject.name())
.retentionPeriod(3650)
.shardCount(3)
.autoSplit(true)
.maxSplitShardCount(60)
.appendMeta(true)
.build());
var example2 = new Store("example2", StoreArgs.builder()
.project(exampleProject.name())
.retentionPeriod(3650)
.shardCount(3)
.autoSplit(true)
.maxSplitShardCount(60)
.appendMeta(true)
.build());
var example3 = new Store("example3", StoreArgs.builder()
.project(exampleProject.name())
.retentionPeriod(3650)
.shardCount(3)
.autoSplit(true)
.maxSplitShardCount(60)
.appendMeta(true)
.build());
var exampleEtl = new Etl("exampleEtl", EtlArgs.builder()
.etlName("terraform-example")
.project(exampleProject.name())
.displayName("terraform-example")
.description("terraform-example")
.accessKeyId("access_key_id")
.accessKeySecret("access_key_secret")
.script("e_set('new','key')")
.logstore(exampleStore.name())
.etlSinks(
EtlEtlSinkArgs.builder()
.name("target_name")
.accessKeyId("example2_access_key_id")
.accessKeySecret("example2_access_key_secret")
.endpoint("cn-hangzhou.log.aliyuncs.com")
.project(exampleProject.name())
.logstore(example2.name())
.build(),
EtlEtlSinkArgs.builder()
.name("target_name2")
.accessKeyId("example3_access_key_id")
.accessKeySecret("example3_access_key_secret")
.endpoint("cn-hangzhou.log.aliyuncs.com")
.project(exampleProject.name())
.logstore(example3.name())
.build())
.build());
}
}
import pulumi
import pulumi_alicloud as alicloud
import pulumi_random as random
default = random.RandomInteger("default",
max=99999,
min=10000)
example_project = alicloud.log.Project("exampleProject", description="terraform-example")
example_store = alicloud.log.Store("exampleStore",
project=example_project.name,
retention_period=3650,
shard_count=3,
auto_split=True,
max_split_shard_count=60,
append_meta=True)
example2 = alicloud.log.Store("example2",
project=example_project.name,
retention_period=3650,
shard_count=3,
auto_split=True,
max_split_shard_count=60,
append_meta=True)
example3 = alicloud.log.Store("example3",
project=example_project.name,
retention_period=3650,
shard_count=3,
auto_split=True,
max_split_shard_count=60,
append_meta=True)
example_etl = alicloud.log.Etl("exampleEtl",
etl_name="terraform-example",
project=example_project.name,
display_name="terraform-example",
description="terraform-example",
access_key_id="access_key_id",
access_key_secret="access_key_secret",
script="e_set('new','key')",
logstore=example_store.name,
etl_sinks=[
alicloud.log.EtlEtlSinkArgs(
name="target_name",
access_key_id="example2_access_key_id",
access_key_secret="example2_access_key_secret",
endpoint="cn-hangzhou.log.aliyuncs.com",
project=example_project.name,
logstore=example2.name,
),
alicloud.log.EtlEtlSinkArgs(
name="target_name2",
access_key_id="example3_access_key_id",
access_key_secret="example3_access_key_secret",
endpoint="cn-hangzhou.log.aliyuncs.com",
project=example_project.name,
logstore=example3.name,
),
])
import * as pulumi from "@pulumi/pulumi";
import * as alicloud from "@pulumi/alicloud";
import * as random from "@pulumi/random";
const _default = new random.RandomInteger("default", {
max: 99999,
min: 10000,
});
const exampleProject = new alicloud.log.Project("exampleProject", {description: "terraform-example"});
const exampleStore = new alicloud.log.Store("exampleStore", {
project: exampleProject.name,
retentionPeriod: 3650,
shardCount: 3,
autoSplit: true,
maxSplitShardCount: 60,
appendMeta: true,
});
const example2 = new alicloud.log.Store("example2", {
project: exampleProject.name,
retentionPeriod: 3650,
shardCount: 3,
autoSplit: true,
maxSplitShardCount: 60,
appendMeta: true,
});
const example3 = new alicloud.log.Store("example3", {
project: exampleProject.name,
retentionPeriod: 3650,
shardCount: 3,
autoSplit: true,
maxSplitShardCount: 60,
appendMeta: true,
});
const exampleEtl = new alicloud.log.Etl("exampleEtl", {
etlName: "terraform-example",
project: exampleProject.name,
displayName: "terraform-example",
description: "terraform-example",
accessKeyId: "access_key_id",
accessKeySecret: "access_key_secret",
script: "e_set('new','key')",
logstore: exampleStore.name,
etlSinks: [
{
name: "target_name",
accessKeyId: "example2_access_key_id",
accessKeySecret: "example2_access_key_secret",
endpoint: "cn-hangzhou.log.aliyuncs.com",
project: exampleProject.name,
logstore: example2.name,
},
{
name: "target_name2",
accessKeyId: "example3_access_key_id",
accessKeySecret: "example3_access_key_secret",
endpoint: "cn-hangzhou.log.aliyuncs.com",
project: exampleProject.name,
logstore: example3.name,
},
],
});
resources:
default:
type: random:RandomInteger
properties:
max: 99999
min: 10000
exampleProject:
type: alicloud:log:Project
properties:
description: terraform-example
exampleStore:
type: alicloud:log:Store
properties:
project: ${exampleProject.name}
retentionPeriod: 3650
shardCount: 3
autoSplit: true
maxSplitShardCount: 60
appendMeta: true
example2:
type: alicloud:log:Store
properties:
project: ${exampleProject.name}
retentionPeriod: 3650
shardCount: 3
autoSplit: true
maxSplitShardCount: 60
appendMeta: true
example3:
type: alicloud:log:Store
properties:
project: ${exampleProject.name}
retentionPeriod: 3650
shardCount: 3
autoSplit: true
maxSplitShardCount: 60
appendMeta: true
exampleEtl:
type: alicloud:log:Etl
properties:
etlName: terraform-example
project: ${exampleProject.name}
displayName: terraform-example
description: terraform-example
accessKeyId: access_key_id
accessKeySecret: access_key_secret
script: e_set('new','key')
logstore: ${exampleStore.name}
etlSinks:
- name: target_name
accessKeyId: example2_access_key_id
accessKeySecret: example2_access_key_secret
endpoint: cn-hangzhou.log.aliyuncs.com
project: ${exampleProject.name}
logstore: ${example2.name}
- name: target_name2
accessKeyId: example3_access_key_id
accessKeySecret: example3_access_key_secret
endpoint: cn-hangzhou.log.aliyuncs.com
project: ${exampleProject.name}
logstore: ${example3.name}
Create Etl Resource
new Etl(name: string, args: EtlArgs, opts?: CustomResourceOptions);
@overload
def Etl(resource_name: str,
opts: Optional[ResourceOptions] = None,
access_key_id: Optional[str] = None,
access_key_secret: Optional[str] = None,
create_time: Optional[int] = None,
description: Optional[str] = None,
display_name: Optional[str] = None,
etl_name: Optional[str] = None,
etl_sinks: Optional[Sequence[EtlEtlSinkArgs]] = None,
etl_type: Optional[str] = None,
from_time: Optional[int] = None,
kms_encrypted_access_key_id: Optional[str] = None,
kms_encrypted_access_key_secret: Optional[str] = None,
kms_encryption_access_key_id_context: Optional[Mapping[str, Any]] = None,
kms_encryption_access_key_secret_context: Optional[Mapping[str, Any]] = None,
last_modified_time: Optional[int] = None,
logstore: Optional[str] = None,
parameters: Optional[Mapping[str, str]] = None,
project: Optional[str] = None,
role_arn: Optional[str] = None,
schedule: Optional[str] = None,
script: Optional[str] = None,
status: Optional[str] = None,
to_time: Optional[int] = None,
version: Optional[int] = None)
@overload
def Etl(resource_name: str,
args: EtlArgs,
opts: Optional[ResourceOptions] = None)
func NewEtl(ctx *Context, name string, args EtlArgs, opts ...ResourceOption) (*Etl, error)
public Etl(string name, EtlArgs args, CustomResourceOptions? opts = null)
type: alicloud:log:Etl
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args EtlArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args EtlArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args EtlArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args EtlArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args EtlArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Etl Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
The Etl resource accepts the following input properties:
- DisplayName string: Log Service ETL job alias.
- EtlName string: The name of the log ETL job.
- EtlSinks List<Pulumi.AliCloud.Log.Inputs.EtlEtlSink>: Target logstore configuration for delivery after data processing.
- Logstore string: Delivery target logstore.
- Project string: The project where the target logstore is delivered.
- Script string: Processing script (data transformation syntax).
- AccessKeyId string: Delivery target logstore access key ID.
- AccessKeySecret string: Delivery target logstore access key secret.
- CreateTime int: The ETL job creation time.
- Description string: Description of the log ETL job.
- EtlType string: Log Service ETL type; the default value is ETL.
- FromTime int: The start time of the processing job. If not set, the value is 0, which indicates that processing starts from the oldest data.
- KmsEncryptedAccessKeyId string: A KMS-encrypted access key ID used by the log ETL job. If access_key_id is filled in, this field is ignored.
- KmsEncryptedAccessKeySecret string: A KMS-encrypted access key secret used by the log ETL job. If access_key_secret is filled in, this field is ignored.
- KmsEncryptionAccessKeyIdContext Dictionary<string, object>: A KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.
- KmsEncryptionAccessKeySecretContext Dictionary<string, object>: A KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.
- LastModifiedTime int: ETL job last modified time.
- Parameters Dictionary<string, string>: Advanced parameter configuration of processing operations.
- RoleArn string: STS role info for the delivery target logstore. Fill in at most one of role_arn and (access_key_id, access_key_secret). If neither is filled in, you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to obtain the key pair through KMS.
- Schedule string: Job scheduling type; the default value is Resident.
- Status string: Job status; the default value is RUNNING. Only four values are supported: STARTING, RUNNING, STOPPING, STOPPED.
- ToTime int: Deadline of the processing job. If not set, the value is 0, which indicates that new data will be processed continuously.
- Version int: Log ETL job version; the default value is 2.

- DisplayName string: Log Service ETL job alias.
- EtlName string: The name of the log ETL job.
- EtlSinks []EtlEtlSinkArgs: Target logstore configuration for delivery after data processing.
- Logstore string: Delivery target logstore.
- Project string: The project where the target logstore is delivered.
- Script string: Processing script (data transformation syntax).
- AccessKeyId string: Delivery target logstore access key ID.
- AccessKeySecret string: Delivery target logstore access key secret.
- CreateTime int: The ETL job creation time.
- Description string: Description of the log ETL job.
- EtlType string: Log Service ETL type; the default value is ETL.
- FromTime int: The start time of the processing job. If not set, the value is 0, which indicates that processing starts from the oldest data.
- KmsEncryptedAccessKeyId string: A KMS-encrypted access key ID used by the log ETL job. If access_key_id is filled in, this field is ignored.
- KmsEncryptedAccessKeySecret string: A KMS-encrypted access key secret used by the log ETL job. If access_key_secret is filled in, this field is ignored.
- KmsEncryptionAccessKeyIdContext map[string]interface{}: A KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.
- KmsEncryptionAccessKeySecretContext map[string]interface{}: A KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.
- LastModifiedTime int: ETL job last modified time.
- Parameters map[string]string: Advanced parameter configuration of processing operations.
- RoleArn string: STS role info for the delivery target logstore. Fill in at most one of role_arn and (access_key_id, access_key_secret). If neither is filled in, you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to obtain the key pair through KMS.
- Schedule string: Job scheduling type; the default value is Resident.
- Status string: Job status; the default value is RUNNING. Only four values are supported: STARTING, RUNNING, STOPPING, STOPPED.
- ToTime int: Deadline of the processing job. If not set, the value is 0, which indicates that new data will be processed continuously.
- Version int: Log ETL job version; the default value is 2.

- displayName String: Log Service ETL job alias.
- etlName String: The name of the log ETL job.
- etlSinks List<EtlEtlSink>: Target logstore configuration for delivery after data processing.
- logstore String: Delivery target logstore.
- project String: The project where the target logstore is delivered.
- script String: Processing script (data transformation syntax).
- accessKeyId String: Delivery target logstore access key ID.
- accessKeySecret String: Delivery target logstore access key secret.
- createTime Integer: The ETL job creation time.
- description String: Description of the log ETL job.
- etlType String: Log Service ETL type; the default value is ETL.
- fromTime Integer: The start time of the processing job. If not set, the value is 0, which indicates that processing starts from the oldest data.
- kmsEncryptedAccessKeyId String: A KMS-encrypted access key ID used by the log ETL job. If access_key_id is filled in, this field is ignored.
- kmsEncryptedAccessKeySecret String: A KMS-encrypted access key secret used by the log ETL job. If access_key_secret is filled in, this field is ignored.
- kmsEncryptionAccessKeyIdContext Map<String,Object>: A KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.
- kmsEncryptionAccessKeySecretContext Map<String,Object>: A KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.
- lastModifiedTime Integer: ETL job last modified time.
- parameters Map<String,String>: Advanced parameter configuration of processing operations.
- roleArn String: STS role info for the delivery target logstore. Fill in at most one of role_arn and (access_key_id, access_key_secret). If neither is filled in, you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to obtain the key pair through KMS.
- schedule String: Job scheduling type; the default value is Resident.
- status String: Job status; the default value is RUNNING. Only four values are supported: STARTING, RUNNING, STOPPING, STOPPED.
- toTime Integer: Deadline of the processing job. If not set, the value is 0, which indicates that new data will be processed continuously.
- version Integer: Log ETL job version; the default value is 2.

- displayName string: Log Service ETL job alias.
- etlName string: The name of the log ETL job.
- etlSinks EtlEtlSink[]: Target logstore configuration for delivery after data processing.
- logstore string: Delivery target logstore.
- project string: The project where the target logstore is delivered.
- script string: Processing script (data transformation syntax).
- accessKeyId string: Delivery target logstore access key ID.
- accessKeySecret string: Delivery target logstore access key secret.
- createTime number: The ETL job creation time.
- description string: Description of the log ETL job.
- etlType string: Log Service ETL type; the default value is ETL.
- fromTime number: The start time of the processing job. If not set, the value is 0, which indicates that processing starts from the oldest data.
- kmsEncryptedAccessKeyId string: A KMS-encrypted access key ID used by the log ETL job. If access_key_id is filled in, this field is ignored.
- kmsEncryptedAccessKeySecret string: A KMS-encrypted access key secret used by the log ETL job. If access_key_secret is filled in, this field is ignored.
- kmsEncryptionAccessKeyIdContext {[key: string]: any}: A KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.
- kmsEncryptionAccessKeySecretContext {[key: string]: any}: A KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.
- lastModifiedTime number: ETL job last modified time.
- parameters {[key: string]: string}: Advanced parameter configuration of processing operations.
- roleArn string: STS role info for the delivery target logstore. Fill in at most one of role_arn and (access_key_id, access_key_secret). If neither is filled in, you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to obtain the key pair through KMS.
- schedule string: Job scheduling type; the default value is Resident.
- status string: Job status; the default value is RUNNING. Only four values are supported: STARTING, RUNNING, STOPPING, STOPPED.
- toTime number: Deadline of the processing job. If not set, the value is 0, which indicates that new data will be processed continuously.
- version number: Log ETL job version; the default value is 2.

- display_name str: Log Service ETL job alias.
- etl_name str: The name of the log ETL job.
- etl_sinks Sequence[EtlEtlSinkArgs]: Target logstore configuration for delivery after data processing.
- logstore str: Delivery target logstore.
- project str: The project where the target logstore is delivered.
- script str: Processing script (data transformation syntax).
- access_key_id str: Delivery target logstore access key ID.
- access_key_secret str: Delivery target logstore access key secret.
- create_time int: The ETL job creation time.
- description str: Description of the log ETL job.
- etl_type str: Log Service ETL type; the default value is ETL.
- from_time int: The start time of the processing job. If not set, the value is 0, which indicates that processing starts from the oldest data.
- kms_encrypted_access_key_id str: A KMS-encrypted access key ID used by the log ETL job. If access_key_id is filled in, this field is ignored.
- kms_encrypted_access_key_secret str: A KMS-encrypted access key secret used by the log ETL job. If access_key_secret is filled in, this field is ignored.
- kms_encryption_access_key_id_context Mapping[str, Any]: A KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.
- kms_encryption_access_key_secret_context Mapping[str, Any]: A KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.
- last_modified_time int: ETL job last modified time.
- parameters Mapping[str, str]: Advanced parameter configuration of processing operations.
- role_arn str: STS role info for the delivery target logstore. Fill in at most one of role_arn and (access_key_id, access_key_secret). If neither is filled in, you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to obtain the key pair through KMS.
- schedule str: Job scheduling type; the default value is Resident.
- status str: Job status; the default value is RUNNING. Only four values are supported: STARTING, RUNNING, STOPPING, STOPPED.
- to_time int: Deadline of the processing job. If not set, the value is 0, which indicates that new data will be processed continuously.
- version int: Log ETL job version; the default value is 2.

- displayName String: Log Service ETL job alias.
- etlName String: The name of the log ETL job.
- etlSinks List<Property Map>: Target logstore configuration for delivery after data processing.
- logstore String: Delivery target logstore.
- project String: The project where the target logstore is delivered.
- script String: Processing script (data transformation syntax).
- accessKeyId String: Delivery target logstore access key ID.
- accessKeySecret String: Delivery target logstore access key secret.
- createTime Number: The ETL job creation time.
- description String: Description of the log ETL job.
- etlType String: Log Service ETL type; the default value is ETL.
- fromTime Number: The start time of the processing job. If not set, the value is 0, which indicates that processing starts from the oldest data.
- kmsEncryptedAccessKeyId String: A KMS-encrypted access key ID used by the log ETL job. If access_key_id is filled in, this field is ignored.
- kmsEncryptedAccessKeySecret String: A KMS-encrypted access key secret used by the log ETL job. If access_key_secret is filled in, this field is ignored.
- kmsEncryptionAccessKeyIdContext Map<Any>: A KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.
- kmsEncryptionAccessKeySecretContext Map<Any>: A KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.
- lastModifiedTime Number: ETL job last modified time.
- parameters Map<String>: Advanced parameter configuration of processing operations.
- roleArn String: STS role info for the delivery target logstore. Fill in at most one of role_arn and (access_key_id, access_key_secret). If neither is filled in, you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to obtain the key pair through KMS.
- schedule String: Job scheduling type; the default value is Resident.
- status String: Job status; the default value is RUNNING. Only four values are supported: STARTING, RUNNING, STOPPING, STOPPED.
- toTime Number: Deadline of the processing job. If not set, the value is 0, which indicates that new data will be processed continuously.
- version Number: Log ETL job version; the default value is 2.
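Instead of plaintext access keys, the job and each sink can authenticate through an STS role supplied via role_arn. A minimal TypeScript sketch, assuming a pre-existing project, logstores, and RAM role (all names below are hypothetical):

import * as alicloud from "@pulumi/alicloud";

// Hypothetical, pre-existing project/logstore names and RAM role; in practice
// these would come from resources such as the ones in the Basic Usage example.
const projectName = "terraform-example";
const sourceLogstore = "example-source";
const targetLogstore = "example-target";
const etlRoleArn = "acs:ram::1234567890123456:role/sls-etl-role";

const roleBasedEtl = new alicloud.log.Etl("roleBasedEtl", {
    etlName: "terraform-example-role",
    displayName: "terraform-example-role",
    project: projectName,
    logstore: sourceLogstore,
    script: "e_set('new','key')",
    roleArn: etlRoleArn,               // used in place of accessKeyId/accessKeySecret
    etlSinks: [{
        name: "target_name",
        endpoint: "cn-hangzhou.log.aliyuncs.com",
        project: projectName,
        logstore: targetLogstore,
        roleArn: etlRoleArn,           // sinks can also authenticate with the role
    }],
});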
Outputs
All input properties are implicitly available as output properties. Additionally, the Etl resource produces the following output properties:
- Id string
The provider-assigned unique ID for this managed resource.
- Id string
The provider-assigned unique ID for this managed resource.
- id String
The provider-assigned unique ID for this managed resource.
- id string
The provider-assigned unique ID for this managed resource.
- id str
The provider-assigned unique ID for this managed resource.
- id String
The provider-assigned unique ID for this managed resource.
Look up Existing Etl Resource
Get an existing Etl resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: EtlState, opts?: CustomResourceOptions): Etl
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
access_key_id: Optional[str] = None,
access_key_secret: Optional[str] = None,
create_time: Optional[int] = None,
description: Optional[str] = None,
display_name: Optional[str] = None,
etl_name: Optional[str] = None,
etl_sinks: Optional[Sequence[EtlEtlSinkArgs]] = None,
etl_type: Optional[str] = None,
from_time: Optional[int] = None,
kms_encrypted_access_key_id: Optional[str] = None,
kms_encrypted_access_key_secret: Optional[str] = None,
kms_encryption_access_key_id_context: Optional[Mapping[str, Any]] = None,
kms_encryption_access_key_secret_context: Optional[Mapping[str, Any]] = None,
last_modified_time: Optional[int] = None,
logstore: Optional[str] = None,
parameters: Optional[Mapping[str, str]] = None,
project: Optional[str] = None,
role_arn: Optional[str] = None,
schedule: Optional[str] = None,
script: Optional[str] = None,
status: Optional[str] = None,
to_time: Optional[int] = None,
version: Optional[int] = None) -> Etl
func GetEtl(ctx *Context, name string, id IDInput, state *EtlState, opts ...ResourceOption) (*Etl, error)
public static Etl Get(string name, Input<string> id, EtlState? state, CustomResourceOptions? opts = null)
public static Etl get(String name, Output<String> id, EtlState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
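For the other languages, a minimal TypeScript sketch of looking up an existing job is shown below; the ID value is hypothetical and has the same <project>:<etl_name> form used for import:

import * as alicloud from "@pulumi/alicloud";

// Look up an ETL job that already exists outside of this stack.
// The provider ID has the form <project>:<etl_name>.
const existing = alicloud.log.Etl.get("existing", "tf-log-project:tf-log-etl-name");

// The looked-up resource exposes the same output properties as a created one.
export const existingEtlStatus = existing.status;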
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- AccessKeyId string: Delivery target logstore access key ID.
- AccessKeySecret string: Delivery target logstore access key secret.
- CreateTime int: The ETL job creation time.
- Description string: Description of the log ETL job.
- DisplayName string: Log Service ETL job alias.
- EtlName string: The name of the log ETL job.
- EtlSinks List<Pulumi.AliCloud.Log.Inputs.EtlEtlSink>: Target logstore configuration for delivery after data processing.
- EtlType string: Log Service ETL type; the default value is ETL.
- FromTime int: The start time of the processing job. If not set, the value is 0, which indicates that processing starts from the oldest data.
- KmsEncryptedAccessKeyId string: A KMS-encrypted access key ID used by the log ETL job. If access_key_id is filled in, this field is ignored.
- KmsEncryptedAccessKeySecret string: A KMS-encrypted access key secret used by the log ETL job. If access_key_secret is filled in, this field is ignored.
- KmsEncryptionAccessKeyIdContext Dictionary<string, object>: A KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.
- KmsEncryptionAccessKeySecretContext Dictionary<string, object>: A KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.
- LastModifiedTime int: ETL job last modified time.
- Logstore string: Delivery target logstore.
- Parameters Dictionary<string, string>: Advanced parameter configuration of processing operations.
- Project string: The project where the target logstore is delivered.
- RoleArn string: STS role info for the delivery target logstore. Fill in at most one of role_arn and (access_key_id, access_key_secret). If neither is filled in, you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to obtain the key pair through KMS.
- Schedule string: Job scheduling type; the default value is Resident.
- Script string: Processing script (data transformation syntax).
- Status string: Job status; the default value is RUNNING. Only four values are supported: STARTING, RUNNING, STOPPING, STOPPED.
- ToTime int: Deadline of the processing job. If not set, the value is 0, which indicates that new data will be processed continuously.
- Version int: Log ETL job version; the default value is 2.

- AccessKeyId string: Delivery target logstore access key ID.
- AccessKeySecret string: Delivery target logstore access key secret.
- CreateTime int: The ETL job creation time.
- Description string: Description of the log ETL job.
- DisplayName string: Log Service ETL job alias.
- EtlName string: The name of the log ETL job.
- EtlSinks []EtlEtlSinkArgs: Target logstore configuration for delivery after data processing.
- EtlType string: Log Service ETL type; the default value is ETL.
- FromTime int: The start time of the processing job. If not set, the value is 0, which indicates that processing starts from the oldest data.
- KmsEncryptedAccessKeyId string: A KMS-encrypted access key ID used by the log ETL job. If access_key_id is filled in, this field is ignored.
- KmsEncryptedAccessKeySecret string: A KMS-encrypted access key secret used by the log ETL job. If access_key_secret is filled in, this field is ignored.
- KmsEncryptionAccessKeyIdContext map[string]interface{}: A KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.
- KmsEncryptionAccessKeySecretContext map[string]interface{}: A KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.
- LastModifiedTime int: ETL job last modified time.
- Logstore string: Delivery target logstore.
- Parameters map[string]string: Advanced parameter configuration of processing operations.
- Project string: The project where the target logstore is delivered.
- RoleArn string: STS role info for the delivery target logstore. Fill in at most one of role_arn and (access_key_id, access_key_secret). If neither is filled in, you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to obtain the key pair through KMS.
- Schedule string: Job scheduling type; the default value is Resident.
- Script string: Processing script (data transformation syntax).
- Status string: Job status; the default value is RUNNING. Only four values are supported: STARTING, RUNNING, STOPPING, STOPPED.
- ToTime int: Deadline of the processing job. If not set, the value is 0, which indicates that new data will be processed continuously.
- Version int: Log ETL job version; the default value is 2.

- accessKeyId String: Delivery target logstore access key ID.
- accessKeySecret String: Delivery target logstore access key secret.
- createTime Integer: The ETL job creation time.
- description String: Description of the log ETL job.
- displayName String: Log Service ETL job alias.
- etlName String: The name of the log ETL job.
- etlSinks List<EtlEtlSink>: Target logstore configuration for delivery after data processing.
- etlType String: Log Service ETL type; the default value is ETL.
- fromTime Integer: The start time of the processing job. If not set, the value is 0, which indicates that processing starts from the oldest data.
- kmsEncryptedAccessKeyId String: A KMS-encrypted access key ID used by the log ETL job. If access_key_id is filled in, this field is ignored.
- kmsEncryptedAccessKeySecret String: A KMS-encrypted access key secret used by the log ETL job. If access_key_secret is filled in, this field is ignored.
- kmsEncryptionAccessKeyIdContext Map<String,Object>: A KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.
- kmsEncryptionAccessKeySecretContext Map<String,Object>: A KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.
- lastModifiedTime Integer: ETL job last modified time.
- logstore String: Delivery target logstore.
- parameters Map<String,String>: Advanced parameter configuration of processing operations.
- project String: The project where the target logstore is delivered.
- roleArn String: STS role info for the delivery target logstore. Fill in at most one of role_arn and (access_key_id, access_key_secret). If neither is filled in, you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to obtain the key pair through KMS.
- schedule String: Job scheduling type; the default value is Resident.
- script String: Processing script (data transformation syntax).
- status String: Job status; the default value is RUNNING. Only four values are supported: STARTING, RUNNING, STOPPING, STOPPED.
- toTime Integer: Deadline of the processing job. If not set, the value is 0, which indicates that new data will be processed continuously.
- version Integer: Log ETL job version; the default value is 2.

- accessKeyId string: Delivery target logstore access key ID.
- accessKeySecret string: Delivery target logstore access key secret.
- createTime number: The ETL job creation time.
- description string: Description of the log ETL job.
- displayName string: Log Service ETL job alias.
- etlName string: The name of the log ETL job.
- etlSinks EtlEtlSink[]: Target logstore configuration for delivery after data processing.
- etlType string: Log Service ETL type; the default value is ETL.
- fromTime number: The start time of the processing job. If not set, the value is 0, which indicates that processing starts from the oldest data.
- kmsEncryptedAccessKeyId string: A KMS-encrypted access key ID used by the log ETL job. If access_key_id is filled in, this field is ignored.
- kmsEncryptedAccessKeySecret string: A KMS-encrypted access key secret used by the log ETL job. If access_key_secret is filled in, this field is ignored.
- kmsEncryptionAccessKeyIdContext {[key: string]: any}: A KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.
- kmsEncryptionAccessKeySecretContext {[key: string]: any}: A KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.
- lastModifiedTime number: ETL job last modified time.
- logstore string: Delivery target logstore.
- parameters {[key: string]: string}: Advanced parameter configuration of processing operations.
- project string: The project where the target logstore is delivered.
- roleArn string: STS role info for the delivery target logstore. Fill in at most one of role_arn and (access_key_id, access_key_secret). If neither is filled in, you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to obtain the key pair through KMS.
- schedule string: Job scheduling type; the default value is Resident.
- script string: Processing script (data transformation syntax).
- status string: Job status; the default value is RUNNING. Only four values are supported: STARTING, RUNNING, STOPPING, STOPPED.
- toTime number: Deadline of the processing job. If not set, the value is 0, which indicates that new data will be processed continuously.
- version number: Log ETL job version; the default value is 2.

- access_key_id str: Delivery target logstore access key ID.
- access_key_secret str: Delivery target logstore access key secret.
- create_time int: The ETL job creation time.
- description str: Description of the log ETL job.
- display_name str: Log Service ETL job alias.
- etl_name str: The name of the log ETL job.
- etl_sinks Sequence[EtlEtlSinkArgs]: Target logstore configuration for delivery after data processing.
- etl_type str: Log Service ETL type; the default value is ETL.
- from_time int: The start time of the processing job. If not set, the value is 0, which indicates that processing starts from the oldest data.
- kms_encrypted_access_key_id str: A KMS-encrypted access key ID used by the log ETL job. If access_key_id is filled in, this field is ignored.
- kms_encrypted_access_key_secret str: A KMS-encrypted access key secret used by the log ETL job. If access_key_secret is filled in, this field is ignored.
- kms_encryption_access_key_id_context Mapping[str, Any]: A KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.
- kms_encryption_access_key_secret_context Mapping[str, Any]: A KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.
- last_modified_time int: ETL job last modified time.
- logstore str: Delivery target logstore.
- parameters Mapping[str, str]: Advanced parameter configuration of processing operations.
- project str: The project where the target logstore is delivered.
- role_arn str: STS role info for the delivery target logstore. Fill in at most one of role_arn and (access_key_id, access_key_secret). If neither is filled in, you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to obtain the key pair through KMS.
- schedule str: Job scheduling type; the default value is Resident.
- script str: Processing script (data transformation syntax).
- status str: Job status; the default value is RUNNING. Only four values are supported: STARTING, RUNNING, STOPPING, STOPPED.
- to_time int: Deadline of the processing job. If not set, the value is 0, which indicates that new data will be processed continuously.
- version int: Log ETL job version; the default value is 2.

- accessKeyId String: Delivery target logstore access key ID.
- accessKeySecret String: Delivery target logstore access key secret.
- createTime Number: The ETL job creation time.
- description String: Description of the log ETL job.
- displayName String: Log Service ETL job alias.
- etlName String: The name of the log ETL job.
- etlSinks List<Property Map>: Target logstore configuration for delivery after data processing.
- etlType String: Log Service ETL type; the default value is ETL.
- fromTime Number: The start time of the processing job. If not set, the value is 0, which indicates that processing starts from the oldest data.
- kmsEncryptedAccessKeyId String: A KMS-encrypted access key ID used by the log ETL job. If access_key_id is filled in, this field is ignored.
- kmsEncryptedAccessKeySecret String: A KMS-encrypted access key secret used by the log ETL job. If access_key_secret is filled in, this field is ignored.
- kmsEncryptionAccessKeyIdContext Map<Any>: A KMS encryption context used to decrypt kms_encrypted_access_key_id before creating or updating an instance with kms_encrypted_access_key_id. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.
- kmsEncryptionAccessKeySecretContext Map<Any>: A KMS encryption context used to decrypt kms_encrypted_access_key_secret before creating or updating an instance with kms_encrypted_access_key_secret. See Encryption Context. It is valid when kms_encrypted_password is set. When it is changed, the instance will reboot to make the change take effect.
- lastModifiedTime Number: ETL job last modified time.
- logstore String: Delivery target logstore.
- parameters Map<String>: Advanced parameter configuration of processing operations.
- project String: The project where the target logstore is delivered.
- roleArn String: STS role info for the delivery target logstore. Fill in at most one of role_arn and (access_key_id, access_key_secret). If neither is filled in, you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to obtain the key pair through KMS.
- schedule String: Job scheduling type; the default value is Resident.
- script String: Processing script (data transformation syntax).
- status String: Job status; the default value is RUNNING. Only four values are supported: STARTING, RUNNING, STOPPING, STOPPED.
- toTime Number: Deadline of the processing job. If not set, the value is 0, which indicates that new data will be processed continuously.
- version Number: Log ETL job version; the default value is 2.
Supporting Types
EtlEtlSink, EtlEtlSinkArgs
- Endpoint string: Delivery target logstore region endpoint.
- Logstore string: Delivery target logstore.
- Name string: Delivery target name.
- Project string: The project where the target logstore is delivered.
- AccessKeyId string: Delivery target logstore access key ID.
- AccessKeySecret string: Delivery target logstore access key secret.
- KmsEncryptedAccessKeyId string: A KMS-encrypted access key ID used by the log ETL job. If access_key_id is filled in, this field is ignored.
- KmsEncryptedAccessKeySecret string: A KMS-encrypted access key secret used by the log ETL job. If access_key_secret is filled in, this field is ignored.
- RoleArn string: STS role info for the delivery target logstore. Fill in at most one of role_arn and (access_key_id, access_key_secret). If neither is filled in, you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to obtain the key pair through KMS.
- Type string: ETL sinks type; the default value is AliyunLOG.
Note: from_time and to_time cannot be modified after the job has been created successfully.

- Endpoint string: Delivery target logstore region endpoint.
- Logstore string: Delivery target logstore.
- Name string: Delivery target name.
- Project string: The project where the target logstore is delivered.
- AccessKeyId string: Delivery target logstore access key ID.
- AccessKeySecret string: Delivery target logstore access key secret.
- KmsEncryptedAccessKeyId string: A KMS-encrypted access key ID used by the log ETL job. If access_key_id is filled in, this field is ignored.
- KmsEncryptedAccessKeySecret string: A KMS-encrypted access key secret used by the log ETL job. If access_key_secret is filled in, this field is ignored.
- RoleArn string: STS role info for the delivery target logstore. Fill in at most one of role_arn and (access_key_id, access_key_secret). If neither is filled in, you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to obtain the key pair through KMS.
- Type string: ETL sinks type; the default value is AliyunLOG.
Note: from_time and to_time cannot be modified after the job has been created successfully.

- endpoint String: Delivery target logstore region endpoint.
- logstore String: Delivery target logstore.
- name String: Delivery target name.
- project String: The project where the target logstore is delivered.
- accessKeyId String: Delivery target logstore access key ID.
- accessKeySecret String: Delivery target logstore access key secret.
- kmsEncryptedAccessKeyId String: A KMS-encrypted access key ID used by the log ETL job. If access_key_id is filled in, this field is ignored.
- kmsEncryptedAccessKeySecret String: A KMS-encrypted access key secret used by the log ETL job. If access_key_secret is filled in, this field is ignored.
- roleArn String: STS role info for the delivery target logstore. Fill in at most one of role_arn and (access_key_id, access_key_secret). If neither is filled in, you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to obtain the key pair through KMS.
- type String: ETL sinks type; the default value is AliyunLOG.
Note: from_time and to_time cannot be modified after the job has been created successfully.

- endpoint string: Delivery target logstore region endpoint.
- logstore string: Delivery target logstore.
- name string: Delivery target name.
- project string: The project where the target logstore is delivered.
- accessKeyId string: Delivery target logstore access key ID.
- accessKeySecret string: Delivery target logstore access key secret.
- kmsEncryptedAccessKeyId string: A KMS-encrypted access key ID used by the log ETL job. If access_key_id is filled in, this field is ignored.
- kmsEncryptedAccessKeySecret string: A KMS-encrypted access key secret used by the log ETL job. If access_key_secret is filled in, this field is ignored.
- roleArn string: STS role info for the delivery target logstore. Fill in at most one of role_arn and (access_key_id, access_key_secret). If neither is filled in, you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to obtain the key pair through KMS.
- type string: ETL sinks type; the default value is AliyunLOG.
Note: from_time and to_time cannot be modified after the job has been created successfully.

- endpoint str: Delivery target logstore region endpoint.
- logstore str: Delivery target logstore.
- name str: Delivery target name.
- project str: The project where the target logstore is delivered.
- access_key_id str: Delivery target logstore access key ID.
- access_key_secret str: Delivery target logstore access key secret.
- kms_encrypted_access_key_id str: A KMS-encrypted access key ID used by the log ETL job. If access_key_id is filled in, this field is ignored.
- kms_encrypted_access_key_secret str: A KMS-encrypted access key secret used by the log ETL job. If access_key_secret is filled in, this field is ignored.
- role_arn str: STS role info for the delivery target logstore. Fill in at most one of role_arn and (access_key_id, access_key_secret). If neither is filled in, you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to obtain the key pair through KMS.
- type str: ETL sinks type; the default value is AliyunLOG.
Note: from_time and to_time cannot be modified after the job has been created successfully.

- endpoint String: Delivery target logstore region endpoint.
- logstore String: Delivery target logstore.
- name String: Delivery target name.
- project String: The project where the target logstore is delivered.
- accessKeyId String: Delivery target logstore access key ID.
- accessKeySecret String: Delivery target logstore access key secret.
- kmsEncryptedAccessKeyId String: A KMS-encrypted access key ID used by the log ETL job. If access_key_id is filled in, this field is ignored.
- kmsEncryptedAccessKeySecret String: A KMS-encrypted access key secret used by the log ETL job. If access_key_secret is filled in, this field is ignored.
- roleArn String: STS role info for the delivery target logstore. Fill in at most one of role_arn and (access_key_id, access_key_secret). If neither is filled in, you must fill in (kms_encrypted_access_key_id, kms_encrypted_access_key_secret, kms_encryption_access_key_id_context, kms_encryption_access_key_secret_context) to obtain the key pair through KMS.
- type String: ETL sinks type; the default value is AliyunLOG.
Note: from_time and to_time cannot be modified after the job has been created successfully.
Import
A log ETL job can be imported using the ID, which is composed of the project name and the ETL job name separated by a colon (<project>:<etl_name>), e.g.
$ pulumi import alicloud:log/etl:Etl example tf-log-project:tf-log-etl-name
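The same ID can also be adopted from code through the import resource option; a hedged TypeScript sketch follows, in which every declared property (all names below are hypothetical) must match the existing job's actual configuration:

import * as alicloud from "@pulumi/alicloud";

// Adopt an existing ETL job into the stack instead of creating a new one.
const adopted = new alicloud.log.Etl("example", {
    etlName: "tf-log-etl-name",
    displayName: "tf-log-etl-name",
    project: "tf-log-project",
    logstore: "tf-log-source",            // hypothetical source logstore
    script: "e_set('new','key')",
    etlSinks: [{
        name: "target_name",
        endpoint: "cn-hangzhou.log.aliyuncs.com",
        project: "tf-log-project",
        logstore: "tf-log-target",        // hypothetical target logstore
    }],
}, { import: "tf-log-project:tf-log-etl-name" });

After the first successful pulumi up, the import option can be removed.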
Package Details
- Repository
- Alibaba Cloud pulumi/pulumi-alicloud
- License
- Apache-2.0
- Notes
This Pulumi package is based on the alicloud Terraform Provider.