tencentcloud 1.82.16 published on Friday, Aug 8, 2025 by tencentcloudstack
tencentcloud.getDlcDescribeUpdatableDataEngines
Explore with Pulumi AI
tencentcloud 1.82.16 published on Friday, Aug 8, 2025 by tencentcloudstack
Use this data source to query detailed information of DLC describe updatable data engines
Example Usage
// Example: list data engines whose managed-table (LakeFS) path can be
// updated, using the "UpdateSparkSQLLakefsPath" configuration command.
import * as pulumi from "@pulumi/pulumi";
import * as tencentcloud from "@pulumi/tencentcloud";
const example = tencentcloud.getDlcDescribeUpdatableDataEngines({
dataEngineConfigCommand: "UpdateSparkSQLLakefsPath",
});
# Example: list data engines whose managed-table (LakeFS) path can be
# updated, using the "UpdateSparkSQLLakefsPath" configuration command.
import pulumi
import pulumi_tencentcloud as tencentcloud
example = tencentcloud.get_dlc_describe_updatable_data_engines(data_engine_config_command="UpdateSparkSQLLakefsPath")
// Example: list data engines whose managed-table (LakeFS) path can be
// updated, using the "UpdateSparkSQLLakefsPath" configuration command.
package main
import (
"github.com/pulumi/pulumi-terraform-provider/sdks/go/tencentcloud/tencentcloud"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
// Invoke the data source; the result is discarded in this example.
_, err := tencentcloud.GetDlcDescribeUpdatableDataEngines(ctx, &tencentcloud.GetDlcDescribeUpdatableDataEnginesArgs{
DataEngineConfigCommand: "UpdateSparkSQLLakefsPath",
}, nil)
if err != nil {
return err
}
return nil
})
}
// Example: list data engines whose managed-table (LakeFS) path can be
// updated, using the "UpdateSparkSQLLakefsPath" configuration command.
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Tencentcloud = Pulumi.Tencentcloud;
return await Deployment.RunAsync(() =>
{
var example = Tencentcloud.GetDlcDescribeUpdatableDataEngines.Invoke(new()
{
DataEngineConfigCommand = "UpdateSparkSQLLakefsPath",
});
});
// Example: list data engines whose managed-table (LakeFS) path can be
// updated, using the "UpdateSparkSQLLakefsPath" configuration command.
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.tencentcloud.TencentcloudFunctions;
import com.pulumi.tencentcloud.inputs.GetDlcDescribeUpdatableDataEnginesArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
// Stack program: performs the data-source invoke with the LakeFS-path command.
public static void stack(Context ctx) {
final var example = TencentcloudFunctions.getDlcDescribeUpdatableDataEngines(GetDlcDescribeUpdatableDataEnginesArgs.builder()
.dataEngineConfigCommand("UpdateSparkSQLLakefsPath")
.build());
}
}
# Example: list data engines whose managed-table (LakeFS) path can be
# updated, using the "UpdateSparkSQLLakefsPath" configuration command.
variables:
example:
fn::invoke:
function: tencentcloud:getDlcDescribeUpdatableDataEngines
arguments:
dataEngineConfigCommand: UpdateSparkSQLLakefsPath
Or
// Example: list data engines whose result-bucket path can be updated,
// using the "UpdateSparkSQLResultPath" configuration command.
import * as pulumi from "@pulumi/pulumi";
import * as tencentcloud from "@pulumi/tencentcloud";
const example = tencentcloud.getDlcDescribeUpdatableDataEngines({
dataEngineConfigCommand: "UpdateSparkSQLResultPath",
});
# Example: list data engines whose result-bucket path can be updated,
# using the "UpdateSparkSQLResultPath" configuration command.
import pulumi
import pulumi_tencentcloud as tencentcloud
example = tencentcloud.get_dlc_describe_updatable_data_engines(data_engine_config_command="UpdateSparkSQLResultPath")
// Example: list data engines whose result-bucket path can be updated,
// using the "UpdateSparkSQLResultPath" configuration command.
package main
import (
"github.com/pulumi/pulumi-terraform-provider/sdks/go/tencentcloud/tencentcloud"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
// Invoke the data source; the result is discarded in this example.
_, err := tencentcloud.GetDlcDescribeUpdatableDataEngines(ctx, &tencentcloud.GetDlcDescribeUpdatableDataEnginesArgs{
DataEngineConfigCommand: "UpdateSparkSQLResultPath",
}, nil)
if err != nil {
return err
}
return nil
})
}
// Example: list data engines whose result-bucket path can be updated,
// using the "UpdateSparkSQLResultPath" configuration command.
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Tencentcloud = Pulumi.Tencentcloud;
return await Deployment.RunAsync(() =>
{
var example = Tencentcloud.GetDlcDescribeUpdatableDataEngines.Invoke(new()
{
DataEngineConfigCommand = "UpdateSparkSQLResultPath",
});
});
// Example: list data engines whose result-bucket path can be updated,
// using the "UpdateSparkSQLResultPath" configuration command.
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.tencentcloud.TencentcloudFunctions;
import com.pulumi.tencentcloud.inputs.GetDlcDescribeUpdatableDataEnginesArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
// Stack program: performs the data-source invoke with the result-path command.
public static void stack(Context ctx) {
final var example = TencentcloudFunctions.getDlcDescribeUpdatableDataEngines(GetDlcDescribeUpdatableDataEnginesArgs.builder()
.dataEngineConfigCommand("UpdateSparkSQLResultPath")
.build());
}
}
# Example: list data engines whose result-bucket path can be updated,
# using the "UpdateSparkSQLResultPath" configuration command.
variables:
example:
fn::invoke:
function: tencentcloud:getDlcDescribeUpdatableDataEngines
arguments:
dataEngineConfigCommand: UpdateSparkSQLResultPath
Using getDlcDescribeUpdatableDataEngines
Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.
function getDlcDescribeUpdatableDataEngines(args: GetDlcDescribeUpdatableDataEnginesArgs, opts?: InvokeOptions): Promise<GetDlcDescribeUpdatableDataEnginesResult>
function getDlcDescribeUpdatableDataEnginesOutput(args: GetDlcDescribeUpdatableDataEnginesOutputArgs, opts?: InvokeOptions): Output<GetDlcDescribeUpdatableDataEnginesResult>
def get_dlc_describe_updatable_data_engines(data_engine_config_command: Optional[str] = None,
id: Optional[str] = None,
result_output_file: Optional[str] = None,
opts: Optional[InvokeOptions] = None) -> GetDlcDescribeUpdatableDataEnginesResult
def get_dlc_describe_updatable_data_engines_output(data_engine_config_command: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
result_output_file: Optional[pulumi.Input[str]] = None,
opts: Optional[InvokeOptions] = None) -> Output[GetDlcDescribeUpdatableDataEnginesResult]
func GetDlcDescribeUpdatableDataEngines(ctx *Context, args *GetDlcDescribeUpdatableDataEnginesArgs, opts ...InvokeOption) (*GetDlcDescribeUpdatableDataEnginesResult, error)
func GetDlcDescribeUpdatableDataEnginesOutput(ctx *Context, args *GetDlcDescribeUpdatableDataEnginesOutputArgs, opts ...InvokeOption) GetDlcDescribeUpdatableDataEnginesResultOutput
> Note: This function is named GetDlcDescribeUpdatableDataEngines
in the Go SDK.
public static class GetDlcDescribeUpdatableDataEngines
{
public static Task<GetDlcDescribeUpdatableDataEnginesResult> InvokeAsync(GetDlcDescribeUpdatableDataEnginesArgs args, InvokeOptions? opts = null)
public static Output<GetDlcDescribeUpdatableDataEnginesResult> Invoke(GetDlcDescribeUpdatableDataEnginesInvokeArgs args, InvokeOptions? opts = null)
}
public static CompletableFuture<GetDlcDescribeUpdatableDataEnginesResult> getDlcDescribeUpdatableDataEngines(GetDlcDescribeUpdatableDataEnginesArgs args, InvokeOptions options)
public static Output<GetDlcDescribeUpdatableDataEnginesResult> getDlcDescribeUpdatableDataEngines(GetDlcDescribeUpdatableDataEnginesArgs args, InvokeOptions options)
fn::invoke:
function: tencentcloud:index/getDlcDescribeUpdatableDataEngines:getDlcDescribeUpdatableDataEngines
arguments:
# arguments dictionary
The following arguments are supported:
- DataEngineConfigCommand string - Operation commands of engine configuration. UpdateSparkSQLLakefsPath updates the path of managed tables, and UpdateSparkSQLResultPath updates the path of result buckets.
- Id string
- ResultOutputFile string - Used to save results.
- DataEngineConfigCommand string - Operation commands of engine configuration. UpdateSparkSQLLakefsPath updates the path of managed tables, and UpdateSparkSQLResultPath updates the path of result buckets.
- Id string
- ResultOutputFile string - Used to save results.
- dataEngineConfigCommand String - Operation commands of engine configuration. UpdateSparkSQLLakefsPath updates the path of managed tables, and UpdateSparkSQLResultPath updates the path of result buckets.
- id String
- resultOutputFile String - Used to save results.
- dataEngineConfigCommand string - Operation commands of engine configuration. UpdateSparkSQLLakefsPath updates the path of managed tables, and UpdateSparkSQLResultPath updates the path of result buckets.
- id string
- resultOutputFile string - Used to save results.
- data_engine_config_command str - Operation commands of engine configuration. UpdateSparkSQLLakefsPath updates the path of managed tables, and UpdateSparkSQLResultPath updates the path of result buckets.
- id str
- result_output_file str - Used to save results.
- dataEngineConfigCommand String - Operation commands of engine configuration. UpdateSparkSQLLakefsPath updates the path of managed tables, and UpdateSparkSQLResultPath updates the path of result buckets.
- id String
- resultOutputFile String - Used to save results.
getDlcDescribeUpdatableDataEngines Result
The following output properties are available:
- DataEngineBasicInfos List&lt;GetDlcDescribeUpdatableDataEnginesDataEngineBasicInfo&gt; - Basic cluster information.
- DataEngineConfigCommand string
- Id string
- ResultOutputFile string
- DataEngineBasicInfos []GetDlcDescribeUpdatableDataEnginesDataEngineBasicInfo - Basic cluster information.
- DataEngineConfigCommand string
- Id string
- ResultOutputFile string
- dataEngineBasicInfos List&lt;GetDlcDescribeUpdatableDataEnginesDataEngineBasicInfo&gt; - Basic cluster information.
- dataEngineConfigCommand String
- id String
- resultOutputFile String
- dataEngineBasicInfos GetDlcDescribeUpdatableDataEnginesDataEngineBasicInfo[] - Basic cluster information.
- dataEngineConfigCommand string
- id string
- resultOutputFile string
- data_engine_basic_infos Sequence[GetDlcDescribeUpdatableDataEnginesDataEngineBasicInfo] - Basic cluster information.
- data_engine_config_command str
- id str
- result_output_file str
- dataEngineBasicInfos List&lt;Property Map&gt; - Basic cluster information.
- dataEngineConfigCommand String
- id String
- resultOutputFile String
Supporting Types
GetDlcDescribeUpdatableDataEnginesDataEngineBasicInfo
- AppId double - User ID.
- CreateTime double - Create time.
- DataEngineId string - Engine ID.
- DataEngineName string - DataEngine name.
- DataEngineType string - Engine types, and the valid values are PrestoSQL, SparkSQL, and SparkBatch.
- Message string - Returned information.
- State double - Data engine status: -2: deleted; -1: failed; 0: initializing; 1: suspended; 2: running; 3: ready to delete; 4: deleting.
- UpdateTime double - Update time.
- UserUin string - Account uin.
- AppId float64 - User ID.
- CreateTime float64 - Create time.
- DataEngineId string - Engine ID.
- DataEngineName string - DataEngine name.
- DataEngineType string - Engine types, and the valid values are PrestoSQL, SparkSQL, and SparkBatch.
- Message string - Returned information.
- State float64 - Data engine status: -2: deleted; -1: failed; 0: initializing; 1: suspended; 2: running; 3: ready to delete; 4: deleting.
- UpdateTime float64 - Update time.
- UserUin string - Account uin.
- appId Double - User ID.
- createTime Double - Create time.
- dataEngineId String - Engine ID.
- dataEngineName String - DataEngine name.
- dataEngineType String - Engine types, and the valid values are PrestoSQL, SparkSQL, and SparkBatch.
- message String - Returned information.
- state Double - Data engine status: -2: deleted; -1: failed; 0: initializing; 1: suspended; 2: running; 3: ready to delete; 4: deleting.
- updateTime Double - Update time.
- userUin String - Account uin.
- appId number - User ID.
- createTime number - Create time.
- dataEngineId string - Engine ID.
- dataEngineName string - DataEngine name.
- dataEngineType string - Engine types, and the valid values are PrestoSQL, SparkSQL, and SparkBatch.
- message string - Returned information.
- state number - Data engine status: -2: deleted; -1: failed; 0: initializing; 1: suspended; 2: running; 3: ready to delete; 4: deleting.
- updateTime number - Update time.
- userUin string - Account uin.
- app_id float - User ID.
- create_time float - Create time.
- data_engine_id str - Engine ID.
- data_engine_name str - DataEngine name.
- data_engine_type str - Engine types, and the valid values are PrestoSQL, SparkSQL, and SparkBatch.
- message str - Returned information.
- state float - Data engine status: -2: deleted; -1: failed; 0: initializing; 1: suspended; 2: running; 3: ready to delete; 4: deleting.
- update_time float - Update time.
- user_uin str - Account uin.
- appId Number - User ID.
- createTime Number - Create time.
- dataEngineId String - Engine ID.
- dataEngineName String - DataEngine name.
- dataEngineType String - Engine types, and the valid values are PrestoSQL, SparkSQL, and SparkBatch.
- message String - Returned information.
- state Number - Data engine status: -2: deleted; -1: failed; 0: initializing; 1: suspended; 2: running; 3: ready to delete; 4: deleting.
- updateTime Number - Update time.
- userUin String - Account uin.
Package Details
- Repository
- tencentcloud tencentcloudstack/terraform-provider-tencentcloud
- License
- Notes
- This Pulumi package is based on the
tencentcloud
Terraform Provider.
tencentcloud 1.82.16 published on Friday, Aug 8, 2025 by tencentcloudstack