published on Saturday, Mar 14, 2026 by Pulumi
Provides a Log Service (SLS) Etl resource.
For information about Log Service (SLS) Etl and how to use it, see What is Etl.
NOTE: Available since v1.248.0.
Example Usage
Basic Usage
TypeScript
import * as pulumi from "@pulumi/pulumi";
import * as alicloud from "@pulumi/alicloud";
const config = new pulumi.Config();
const name = config.get("name") || "terraform-example";
const defaulthhAPo6 = new alicloud.log.Project("defaulthhAPo6", {
description: "terraform-etl-example-813",
projectName: "terraform-etl-example-330",
});
const defaultzWKLkp = new alicloud.log.Store("defaultzWKLkp", {
hotTtl: 8,
retentionPeriod: 30,
shardCount: 2,
projectName: defaulthhAPo6.id,
logstoreName: "example",
});
const _default = new alicloud.sls.Etl("default", {
project: defaulthhAPo6.id,
description: "etl-1740472705-185721",
configuration: {
script: "* | extend a=1",
lang: "SPL",
roleArn: name,
sinks: [{
name: "11111",
endpoint: "cn-hangzhou-intranet.log.aliyuncs.com",
project: "gy-hangzhou-huolang-1",
logstore: "gy-rm2",
datasets: ["__UNNAMED__"],
roleArn: name,
}],
logstore: defaultzWKLkp.logstoreName,
fromTime: 1706771697,
toTime: 1738394097,
},
jobName: "etl-1740472705-185721",
displayName: "etl-1740472705-185721",
});
Python
import pulumi
import pulumi_alicloud as alicloud
config = pulumi.Config()
name = config.get("name")
if name is None:
name = "terraform-example"
defaulthh_a_po6 = alicloud.log.Project("defaulthhAPo6",
description="terraform-etl-example-813",
project_name="terraform-etl-example-330")
defaultz_wk_lkp = alicloud.log.Store("defaultzWKLkp",
hot_ttl=8,
retention_period=30,
shard_count=2,
project_name=defaulthh_a_po6.id,
logstore_name="example")
default = alicloud.sls.Etl("default",
project=defaulthh_a_po6.id,
description="etl-1740472705-185721",
configuration={
"script": "* | extend a=1",
"lang": "SPL",
"role_arn": name,
"sinks": [{
"name": "11111",
"endpoint": "cn-hangzhou-intranet.log.aliyuncs.com",
"project": "gy-hangzhou-huolang-1",
"logstore": "gy-rm2",
"datasets": ["__UNNAMED__"],
"role_arn": name,
}],
"logstore": defaultz_wk_lkp.logstore_name,
"from_time": 1706771697,
"to_time": 1738394097,
},
job_name="etl-1740472705-185721",
display_name="etl-1740472705-185721")
Go
package main
import (
"github.com/pulumi/pulumi-alicloud/sdk/v3/go/alicloud/log"
"github.com/pulumi/pulumi-alicloud/sdk/v3/go/alicloud/sls"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi/config"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
cfg := config.New(ctx, "")
name := "terraform-example"
if param := cfg.Get("name"); param != "" {
name = param
}
defaulthhAPo6, err := log.NewProject(ctx, "defaulthhAPo6", &log.ProjectArgs{
Description: pulumi.String("terraform-etl-example-813"),
ProjectName: pulumi.String("terraform-etl-example-330"),
})
if err != nil {
return err
}
defaultzWKLkp, err := log.NewStore(ctx, "defaultzWKLkp", &log.StoreArgs{
HotTtl: pulumi.Int(8),
RetentionPeriod: pulumi.Int(30),
ShardCount: pulumi.Int(2),
ProjectName: defaulthhAPo6.ID(),
LogstoreName: pulumi.String("example"),
})
if err != nil {
return err
}
_, err = sls.NewEtl(ctx, "default", &sls.EtlArgs{
Project: defaulthhAPo6.ID(),
Description: pulumi.String("etl-1740472705-185721"),
Configuration: &sls.EtlConfigurationArgs{
Script: pulumi.String("* | extend a=1"),
Lang: pulumi.String("SPL"),
RoleArn: pulumi.String(name),
Sinks: sls.EtlConfigurationSinkArray{
&sls.EtlConfigurationSinkArgs{
Name: pulumi.String("11111"),
Endpoint: pulumi.String("cn-hangzhou-intranet.log.aliyuncs.com"),
Project: pulumi.String("gy-hangzhou-huolang-1"),
Logstore: pulumi.String("gy-rm2"),
Datasets: pulumi.StringArray{
pulumi.String("__UNNAMED__"),
},
RoleArn: pulumi.String(name),
},
},
Logstore: defaultzWKLkp.LogstoreName,
FromTime: pulumi.Int(1706771697),
ToTime: pulumi.Int(1738394097),
},
JobName: pulumi.String("etl-1740472705-185721"),
DisplayName: pulumi.String("etl-1740472705-185721"),
})
if err != nil {
return err
}
return nil
})
}
C#
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using AliCloud = Pulumi.AliCloud;
return await Deployment.RunAsync(() =>
{
var config = new Config();
var name = config.Get("name") ?? "terraform-example";
var defaulthhAPo6 = new AliCloud.Log.Project("defaulthhAPo6", new()
{
Description = "terraform-etl-example-813",
ProjectName = "terraform-etl-example-330",
});
var defaultzWKLkp = new AliCloud.Log.Store("defaultzWKLkp", new()
{
HotTtl = 8,
RetentionPeriod = 30,
ShardCount = 2,
ProjectName = defaulthhAPo6.Id,
LogstoreName = "example",
});
var @default = new AliCloud.Sls.Etl("default", new()
{
Project = defaulthhAPo6.Id,
Description = "etl-1740472705-185721",
Configuration = new AliCloud.Sls.Inputs.EtlConfigurationArgs
{
Script = "* | extend a=1",
Lang = "SPL",
RoleArn = name,
Sinks = new[]
{
new AliCloud.Sls.Inputs.EtlConfigurationSinkArgs
{
Name = "11111",
Endpoint = "cn-hangzhou-intranet.log.aliyuncs.com",
Project = "gy-hangzhou-huolang-1",
Logstore = "gy-rm2",
Datasets = new[]
{
"__UNNAMED__",
},
RoleArn = name,
},
},
Logstore = defaultzWKLkp.LogstoreName,
FromTime = 1706771697,
ToTime = 1738394097,
},
JobName = "etl-1740472705-185721",
DisplayName = "etl-1740472705-185721",
});
});
Java
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.alicloud.log.Project;
import com.pulumi.alicloud.log.ProjectArgs;
import com.pulumi.alicloud.log.Store;
import com.pulumi.alicloud.log.StoreArgs;
import com.pulumi.alicloud.sls.Etl;
import com.pulumi.alicloud.sls.EtlArgs;
import com.pulumi.alicloud.sls.inputs.EtlConfigurationArgs;
import com.pulumi.alicloud.sls.inputs.EtlConfigurationSinkArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var config = ctx.config();
final var name = config.get("name").orElse("terraform-example");
var defaulthhAPo6 = new Project("defaulthhAPo6", ProjectArgs.builder()
.description("terraform-etl-example-813")
.projectName("terraform-etl-example-330")
.build());
var defaultzWKLkp = new Store("defaultzWKLkp", StoreArgs.builder()
.hotTtl(8)
.retentionPeriod(30)
.shardCount(2)
.projectName(defaulthhAPo6.id())
.logstoreName("example")
.build());
var default_ = new Etl("default", EtlArgs.builder()
.project(defaulthhAPo6.id())
.description("etl-1740472705-185721")
.configuration(EtlConfigurationArgs.builder()
.script("* | extend a=1")
.lang("SPL")
.roleArn(name)
.sinks(EtlConfigurationSinkArgs.builder()
.name("11111")
.endpoint("cn-hangzhou-intranet.log.aliyuncs.com")
.project("gy-hangzhou-huolang-1")
.logstore("gy-rm2")
.datasets("__UNNAMED__")
.roleArn(name)
.build())
.logstore(defaultzWKLkp.logstoreName())
.fromTime(1706771697)
.toTime(1738394097)
.build())
.jobName("etl-1740472705-185721")
.displayName("etl-1740472705-185721")
.build());
}
}
YAML
configuration:
name:
type: string
default: terraform-example
resources:
defaulthhAPo6:
type: alicloud:log:Project
properties:
description: terraform-etl-example-813
projectName: terraform-etl-example-330
defaultzWKLkp:
type: alicloud:log:Store
properties:
hotTtl: '8'
retentionPeriod: '30'
shardCount: '2'
projectName: ${defaulthhAPo6.id}
logstoreName: example
default:
type: alicloud:sls:Etl
properties:
project: ${defaulthhAPo6.id}
description: etl-1740472705-185721
configuration:
script: '* | extend a=1'
lang: SPL
roleArn: ${name}
sinks:
- name: '11111'
endpoint: cn-hangzhou-intranet.log.aliyuncs.com
project: gy-hangzhou-huolang-1
logstore: gy-rm2
datasets:
- __UNNAMED__
roleArn: ${name}
logstore: ${defaultzWKLkp.logstoreName}
fromTime: '1706771697'
toTime: '1738394097'
jobName: etl-1740472705-185721
displayName: etl-1740472705-185721
Create Etl Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
TypeScript
new Etl(name: string, args: EtlArgs, opts?: CustomResourceOptions);

Python
@overload
def Etl(resource_name: str,
        args: EtlArgs,
        opts: Optional[ResourceOptions] = None)
@overload
def Etl(resource_name: str,
        opts: Optional[ResourceOptions] = None,
        configuration: Optional[EtlConfigurationArgs] = None,
        display_name: Optional[str] = None,
        job_name: Optional[str] = None,
        project: Optional[str] = None,
        description: Optional[str] = None)

Go
func NewEtl(ctx *Context, name string, args EtlArgs, opts ...ResourceOption) (*Etl, error)

C#
public Etl(string name, EtlArgs args, CustomResourceOptions? opts = null)

YAML
type: alicloud:sls:Etl
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args EtlArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args EtlArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args EtlArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args EtlArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args EtlArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
C#
var alicloudEtlResource = new AliCloud.Sls.Etl("alicloudEtlResource", new()
{
Configuration = new AliCloud.Sls.Inputs.EtlConfigurationArgs
{
FromTime = 0,
Lang = "string",
Logstore = "string",
RoleArn = "string",
Script = "string",
Sinks = new[]
{
new AliCloud.Sls.Inputs.EtlConfigurationSinkArgs
{
Datasets = new[]
{
"string",
},
Endpoint = "string",
Logstore = "string",
Name = "string",
Project = "string",
RoleArn = "string",
},
},
ToTime = 0,
Parameters =
{
{ "string", "string" },
},
},
DisplayName = "string",
JobName = "string",
Project = "string",
Description = "string",
});
Go
example, err := sls.NewEtl(ctx, "alicloudEtlResource", &sls.EtlArgs{
Configuration: &sls.EtlConfigurationArgs{
FromTime: pulumi.Int(0),
Lang: pulumi.String("string"),
Logstore: pulumi.String("string"),
RoleArn: pulumi.String("string"),
Script: pulumi.String("string"),
Sinks: sls.EtlConfigurationSinkArray{
&sls.EtlConfigurationSinkArgs{
Datasets: pulumi.StringArray{
pulumi.String("string"),
},
Endpoint: pulumi.String("string"),
Logstore: pulumi.String("string"),
Name: pulumi.String("string"),
Project: pulumi.String("string"),
RoleArn: pulumi.String("string"),
},
},
ToTime: pulumi.Int(0),
Parameters: pulumi.StringMap{
"string": pulumi.String("string"),
},
},
DisplayName: pulumi.String("string"),
JobName: pulumi.String("string"),
Project: pulumi.String("string"),
Description: pulumi.String("string"),
})
Java
var alicloudEtlResource = new com.pulumi.alicloud.sls.Etl("alicloudEtlResource", com.pulumi.alicloud.sls.EtlArgs.builder()
.configuration(EtlConfigurationArgs.builder()
.fromTime(0)
.lang("string")
.logstore("string")
.roleArn("string")
.script("string")
.sinks(EtlConfigurationSinkArgs.builder()
.datasets("string")
.endpoint("string")
.logstore("string")
.name("string")
.project("string")
.roleArn("string")
.build())
.toTime(0)
.parameters(Map.of("string", "string"))
.build())
.displayName("string")
.jobName("string")
.project("string")
.description("string")
.build());
Python
alicloud_etl_resource = alicloud.sls.Etl("alicloudEtlResource",
configuration={
"from_time": 0,
"lang": "string",
"logstore": "string",
"role_arn": "string",
"script": "string",
"sinks": [{
"datasets": ["string"],
"endpoint": "string",
"logstore": "string",
"name": "string",
"project": "string",
"role_arn": "string",
}],
"to_time": 0,
"parameters": {
"string": "string",
},
},
display_name="string",
job_name="string",
project="string",
description="string")
TypeScript
const alicloudEtlResource = new alicloud.sls.Etl("alicloudEtlResource", {
configuration: {
fromTime: 0,
lang: "string",
logstore: "string",
roleArn: "string",
script: "string",
sinks: [{
datasets: ["string"],
endpoint: "string",
logstore: "string",
name: "string",
project: "string",
roleArn: "string",
}],
toTime: 0,
parameters: {
string: "string",
},
},
displayName: "string",
jobName: "string",
project: "string",
description: "string",
});
YAML
type: alicloud:sls:Etl
properties:
configuration:
fromTime: 0
lang: string
logstore: string
parameters:
string: string
roleArn: string
script: string
sinks:
- datasets:
- string
endpoint: string
logstore: string
name: string
project: string
roleArn: string
toTime: 0
description: string
displayName: string
jobName: string
project: string
Etl Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The Etl resource accepts the following input properties:
- Configuration Pulumi.AliCloud.Sls.Inputs.EtlConfiguration
- Detailed configuration of the data processing task. See configuration below.
- DisplayName string
- The display name of the data processing task.
- JobName string
- The job name (a validation sketch follows this section). Naming rules are as follows:
- Job names must be unique within the same project.
- Can only contain lowercase letters, digits, hyphens (-), and underscores (_).
- Must start and end with a lowercase letter or digit.
- Must be 2 to 64 characters in length.
- Project string
- Project name.
- Description string
- Description of the data processing task.
- Configuration EtlConfigurationArgs
- Detailed configuration of the data processing task. See configuration below.
- DisplayName string
- The display name of the data processing task.
- JobName string
- The job name. Naming rules are as follows:
- Job names must be unique within the same project.
- Can only contain lowercase letters, digits, hyphens (-), and underscores (_).
- Must start and end with a lowercase letter or digit.
- Must be 2 to 64 characters in length.
- Project string
- Project name.
- Description string
- Description of the data processing task.
- configuration EtlConfiguration
- Detailed configuration of the data processing task. See configuration below.
- displayName String
- The display name of the data processing task.
- jobName String
- The job name. Naming rules are as follows:
- Job names must be unique within the same project.
- Can only contain lowercase letters, digits, hyphens (-), and underscores (_).
- Must start and end with a lowercase letter or digit.
- Must be 2 to 64 characters in length.
- project String
- Project name.
- description String
- Description of the data processing task.
- configuration EtlConfiguration
- Detailed configuration of the data processing task. See configuration below.
- displayName string
- The display name of the data processing task.
- jobName string
- The job name. Naming rules are as follows:
- Job names must be unique within the same project.
- Can only contain lowercase letters, digits, hyphens (-), and underscores (_).
- Must start and end with a lowercase letter or digit.
- Must be 2 to 64 characters in length.
- project string
- Project name.
- description string
- Description of the data processing task.
- configuration EtlConfigurationArgs
- Detailed configuration of the data processing task. See configuration below.
- display_name str
- The display name of the data processing task.
- job_name str
- The job name. Naming rules are as follows:
- Job names must be unique within the same project.
- Can only contain lowercase letters, digits, hyphens (-), and underscores (_).
- Must start and end with a lowercase letter or digit.
- Must be 2 to 64 characters in length.
- project str
- Project name.
- description str
- Description of the data processing task.
- configuration Property Map
- Detailed configuration of the data processing task. See configuration below.
- displayName String
- The display name of the data processing task.
- jobName String
- The job name. Naming rules are as follows:
- Job names must be unique within the same project.
- Can only contain lowercase letters, digits, hyphens (-), and underscores (_).
- Must start and end with a lowercase letter or digit.
- Must be 2 to 64 characters in length.
- project String
- Project name.
- description String
- Description of the data processing task.
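The job name rules above translate directly into a client-side check. Below is a minimal TypeScript sketch; the isValidEtlJobName helper is hypothetical (it is not part of the provider SDK) and only mirrors the documented length and character rules, so uniqueness within the project still has to be verified against the service.

// Hypothetical client-side check mirroring the documented jobName rules:
// 2-64 characters, lowercase letters, digits, hyphens (-), and underscores (_),
// starting and ending with a lowercase letter or digit.
function isValidEtlJobName(jobName: string): boolean {
    return /^[a-z0-9][a-z0-9_-]{0,62}[a-z0-9]$/.test(jobName);
}

isValidEtlJobName("etl-1740472705-185721"); // true - the name used in Basic Usage
isValidEtlJobName("-bad-name-");            // false - starts and ends with a hyphen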
Outputs
All input properties are implicitly available as output properties. Additionally, the Etl resource produces the following output properties:
- CreateTime int
- The time when the task was created.
- Id string
- The provider-assigned unique ID for this managed resource.
- Status string
- Task status.
- CreateTime int
- The time when the task was created.
- Id string
- The provider-assigned unique ID for this managed resource.
- Status string
- Task status.
- createTime Integer
- The time when the task was created.
- id String
- The provider-assigned unique ID for this managed resource.
- status String
- Task status.
- createTime number
- The time when the task was created.
- id string
- The provider-assigned unique ID for this managed resource.
- status string
- Task status.
- create_time int
- The time when the task was created.
- id str
- The provider-assigned unique ID for this managed resource.
- status str
- Task status.
- createTime Number
- The time when the task was created.
- id String
- The provider-assigned unique ID for this managed resource.
- status String
- Task status.
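These outputs can be read from the resource object like any other Pulumi output. A minimal TypeScript sketch continuing the Basic Usage example above, where the Etl resource variable is named _default:

// Export the provider-computed outputs of the ETL job created in Basic Usage.
export const etlStatus = _default.status;         // Task status
export const etlCreateTime = _default.createTime; // The time when the task was created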
Look up Existing Etl Resource
Get an existing Etl resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
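For example, in TypeScript the lookup looks like the following. This is a minimal sketch: the ID reuses the <project>:<job_name> format shown in the Import section below, the project and job names are taken from the Basic Usage example, and it assumes that job already exists in your account.

import * as alicloud from "@pulumi/alicloud";

// Look up an existing ETL job instead of creating a new one.
// The ID has the form "<project>:<job_name>".
const existingEtl = alicloud.sls.Etl.get(
    "existing-etl",
    "terraform-etl-example-330:etl-1740472705-185721",
);

// The looked-up resource exposes the same outputs, e.g. its current status.
export const existingEtlStatus = existingEtl.status;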
TypeScript
public static get(name: string, id: Input<ID>, state?: EtlState, opts?: CustomResourceOptions): Etl

Python
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        configuration: Optional[EtlConfigurationArgs] = None,
        create_time: Optional[int] = None,
        description: Optional[str] = None,
        display_name: Optional[str] = None,
        job_name: Optional[str] = None,
        project: Optional[str] = None,
        status: Optional[str] = None) -> Etl

Go
func GetEtl(ctx *Context, name string, id IDInput, state *EtlState, opts ...ResourceOption) (*Etl, error)

C#
public static Etl Get(string name, Input<string> id, EtlState? state, CustomResourceOptions? opts = null)

Java
public static Etl get(String name, Output<String> id, EtlState state, CustomResourceOptions options)

YAML
resources:
  _:
    type: alicloud:sls:Etl
    get:
      id: ${id}

- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- Configuration Pulumi.AliCloud.Sls.Inputs.EtlConfiguration
- Detailed configuration of the data processing task. See configuration below.
- CreateTime int
- The time when the task was created.
- Description string
- Description of the data processing task.
- DisplayName string
- The display name of the data processing task.
- JobName string
- The job name. Naming rules are as follows:
- Job names must be unique within the same project.
- Can only contain lowercase letters, digits, hyphens (-), and underscores (_).
- Must start and end with a lowercase letter or digit.
- Must be 2 to 64 characters in length.
- Project string
- Project name.
- Status string
- Task status.
- Configuration EtlConfigurationArgs
- Detailed configuration of the data processing task. See configuration below.
- CreateTime int
- The time when the task was created.
- Description string
- Description of the data processing task.
- DisplayName string
- The display name of the data processing task.
- JobName string
- The job name. Naming rules are as follows:
- Job names must be unique within the same project.
- Can only contain lowercase letters, digits, hyphens (-), and underscores (_).
- Must start and end with a lowercase letter or digit.
- Must be 2 to 64 characters in length.
- Project string
- Project name.
- Status string
- Task status.
- configuration EtlConfiguration
- Detailed configuration of the data processing task. See configuration below.
- createTime Integer
- The time when the task was created.
- description String
- Description of the data processing task.
- displayName String
- The display name of the data processing task.
- jobName String
- The job name. Naming rules are as follows:
- Job names must be unique within the same project.
- Can only contain lowercase letters, digits, hyphens (-), and underscores (_).
- Must start and end with a lowercase letter or digit.
- Must be 2 to 64 characters in length.
- project String
- Project name.
- status String
- Task status.
- configuration EtlConfiguration
- Detailed configuration of the data processing task. See configuration below.
- createTime number
- The time when the task was created.
- description string
- Description of the data processing task.
- displayName string
- The display name of the data processing task.
- jobName string
- The job name. Naming rules are as follows:
- Job names must be unique within the same project.
- Can only contain lowercase letters, digits, hyphens (-), and underscores (_).
- Must start and end with a lowercase letter or digit.
- Must be 2 to 64 characters in length.
- project string
- Project name.
- status string
- Task status.
- configuration EtlConfigurationArgs
- Detailed configuration of the data processing task. See configuration below.
- create_time int
- The time when the task was created.
- description str
- Description of the data processing task.
- display_name str
- The display name of the data processing task.
- job_name str
- The job name. Naming rules are as follows:
- Job names must be unique within the same project.
- Can only contain lowercase letters, digits, hyphens (-), and underscores (_).
- Must start and end with a lowercase letter or digit.
- Must be 2 to 64 characters in length.
- project str
- Project name.
- status str
- Task status.
- configuration Property Map
- Detailed configuration of the data processing task. See configuration below.
- createTime Number
- The time when the task was created.
- description String
- Description of the data processing task.
- displayName String
- The display name of the data processing task.
- jobName String
- The job name. Naming rules are as follows:
- Job names must be unique within the same project.
- Can only contain lowercase letters, digits, hyphens (-), and underscores (_).
- Must start and end with a lowercase letter or digit.
- Must be 2 to 64 characters in length.
- project String
- Project name.
- status String
- Task status.
Supporting Types
EtlConfiguration, EtlConfigurationArgs
- FromTime int
- The start timestamp of the processing time (accurate to the second). Enter 0 to start consuming from the first log received in the source Logstore (see the timestamp sketch after this list).
- Lang string
- The syntax type used for data transformation.
- Logstore string
- The name of the destination Logstore.
- RoleArn string
- The ARN of the role authorized to write to the destination Logstore.
- Script string
- Processing script.
- Sinks List<Pulumi.AliCloud.Sls.Inputs.EtlConfigurationSink>
- List of output destinations for processing results. See sink below.
- ToTime int
- End timestamp of the processing time (accurate to the second). Enter 0 if processing continues until manually stopped.
- Parameters Dictionary<string, string>
- Advanced parameter configuration.
- FromTime int
- The start timestamp of the processing time (accurate to the second). Enter 0 to start consuming from the first log received in the source Logstore.
- Lang string
- The syntax type used for data transformation.
- Logstore string
- The name of the destination Logstore.
- RoleArn string
- The ARN of the role authorized to write to the destination Logstore.
- Script string
- Processing script.
- Sinks []EtlConfigurationSink
- List of output destinations for processing results. See sink below.
- ToTime int
- End timestamp of the processing time (accurate to the second). Enter 0 if processing continues until manually stopped.
- Parameters map[string]string
- Advanced parameter configuration.
- fromTime Integer
- The start timestamp of the processing time (accurate to the second). Enter 0 to start consuming from the first log received in the source Logstore.
- lang String
- The syntax type used for data transformation.
- logstore String
- The name of the destination Logstore.
- roleArn String
- The ARN of the role authorized to write to the destination Logstore.
- script String
- Processing script.
- sinks List<EtlConfigurationSink>
- List of output destinations for processing results. See sink below.
- toTime Integer
- End timestamp of the processing time (accurate to the second). Enter 0 if processing continues until manually stopped.
- parameters Map<String,String>
- Advanced parameter configuration.
- fromTime number
- The start timestamp of the processing time (accurate to the second). Enter 0 to start consuming from the first log received in the source Logstore.
- lang string
- The syntax type used for data transformation.
- logstore string
- The name of the destination Logstore.
- roleArn string
- The ARN of the role authorized to write to the destination Logstore.
- script string
- Processing script.
- sinks EtlConfigurationSink[]
- List of output destinations for processing results. See sink below.
- toTime number
- End timestamp of the processing time (accurate to the second). Enter 0 if processing continues until manually stopped.
- parameters {[key: string]: string}
- Advanced parameter configuration.
- from_time int
- The start timestamp of the processing time (accurate to the second). Enter 0 to start consuming from the first log received in the source Logstore.
- lang str
- The syntax type used for data transformation.
- logstore str
- The name of the destination Logstore.
- role_arn str
- The ARN of the role authorized to write to the destination Logstore.
- script str
- Processing script.
- sinks Sequence[EtlConfigurationSink]
- List of output destinations for processing results. See sink below.
- to_time int
- End timestamp of the processing time (accurate to the second). Enter 0 if processing continues until manually stopped.
- parameters Mapping[str, str]
- Advanced parameter configuration.
- fromTime Number
- The start timestamp of the processing time (accurate to the second). Enter 0 to start consuming from the first log received in the source Logstore.
- lang String
- The syntax type used for data transformation.
- logstore String
- The name of the destination Logstore.
- roleArn String
- The ARN of the role authorized to write to the destination Logstore.
- script String
- Processing script.
- sinks List<Property Map>
- List of output destinations for processing results. See sink below.
- toTime Number
- End timestamp of the processing time (accurate to the second). Enter 0 if processing continues until manually stopped.
- parameters Map<String>
- Advanced parameter configuration.
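Both fromTime and toTime are Unix timestamps in seconds. A minimal TypeScript sketch of computing such a window follows; the concrete dates are illustrative assumptions, not the values used in the Basic Usage example.

// fromTime/toTime are second-precision Unix timestamps.
// Date.getTime() returns milliseconds, so divide by 1000 and truncate.
const fromTime = Math.floor(new Date("2024-02-01T00:00:00Z").getTime() / 1000); // 1706745600
const toTime = Math.floor(Date.now() / 1000); // "now", also in seconds

// Passing 0 has a special meaning instead:
//   fromTime: 0 -> start from the first log received in the source Logstore
//   toTime:   0 -> keep processing until the job is stopped manually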
EtlConfigurationSink, EtlConfigurationSinkArgs
- Datasets List<string>
- Result datasets to write to.
- Endpoint string
- The endpoint of the region where the destination project resides.
- Logstore string
- The name of the destination Logstore.
- Name string
- The name of the output destination.
- Project string
- Project name.
- RoleArn string
- The ARN of the role authorized to write to the destination Logstore.
- Datasets []string
- Result datasets to write to.
- Endpoint string
- The endpoint of the region where the destination project resides.
- Logstore string
- The name of the destination Logstore.
- Name string
- The name of the output destination.
- Project string
- Project name.
- RoleArn string
- The ARN of the role authorized to write to the destination Logstore.
- datasets List<String>
- Result datasets to write to.
- endpoint String
- The endpoint of the region where the destination project resides.
- logstore String
- The name of the destination Logstore.
- name String
- The name of the output destination.
- project String
- Project name.
- roleArn String
- The ARN of the role authorized to write to the destination Logstore.
- datasets string[]
- Result datasets to write to.
- endpoint string
- The endpoint of the region where the destination project resides.
- logstore string
- The name of the destination Logstore.
- name string
- The name of the output destination.
- project string
- Project name.
- roleArn string
- The ARN of the role authorized to write to the destination Logstore.
- datasets Sequence[str]
- Result datasets to write to.
- endpoint str
- The endpoint of the region where the destination project resides.
- logstore str
- The name of the destination Logstore.
- name str
- The name of the output destination.
- project str
- Project name.
- role_arn str
- The ARN of the role authorized to write to the destination Logstore.
- datasets List<String>
- Result datasets to write to.
- endpoint String
- The endpoint of the region where the destination project resides.
- logstore String
- The name of the destination Logstore.
- name String
- The name of the output destination.
- project String
- Project name.
- roleArn String
- The ARN of the role authorized to write to the destination Logstore.
Import
Log Service (SLS) Etl can be imported using the id, e.g.
$ pulumi import alicloud:sls/etl:Etl example <project>:<job_name>
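For example, using the project and job name from the Basic Usage example above (assuming that job already exists in your account):

$ pulumi import alicloud:sls/etl:Etl example terraform-etl-example-330:etl-1740472705-185721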
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- Alibaba Cloud pulumi/pulumi-alicloud
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the alicloud Terraform Provider.
