tencentcloud 1.81.189 published on Wednesday, Apr 30, 2025 by tencentcloudstack
tencentcloud.getDlcDescribeUpdatableDataEngines
Use this data source to query detailed information of DLC updatable data engines (dlc describe_updatable_data_engines).
Example Usage
import * as pulumi from "@pulumi/pulumi";
import * as tencentcloud from "@pulumi/tencentcloud";
const describeUpdatableDataEngines = tencentcloud.getDlcDescribeUpdatableDataEngines({
dataEngineConfigCommand: "UpdateSparkSQLLakefsPath",
});
import pulumi
import pulumi_tencentcloud as tencentcloud
describe_updatable_data_engines = tencentcloud.get_dlc_describe_updatable_data_engines(data_engine_config_command="UpdateSparkSQLLakefsPath")
package main
import (
"github.com/pulumi/pulumi-terraform-provider/sdks/go/tencentcloud/tencentcloud"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := tencentcloud.GetDlcDescribeUpdatableDataEngines(ctx, &tencentcloud.GetDlcDescribeUpdatableDataEnginesArgs{
DataEngineConfigCommand: "UpdateSparkSQLLakefsPath",
}, nil)
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Tencentcloud = Pulumi.Tencentcloud;
return await Deployment.RunAsync(() =>
{
var describeUpdatableDataEngines = Tencentcloud.GetDlcDescribeUpdatableDataEngines.Invoke(new()
{
DataEngineConfigCommand = "UpdateSparkSQLLakefsPath",
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.tencentcloud.TencentcloudFunctions;
import com.pulumi.tencentcloud.inputs.GetDlcDescribeUpdatableDataEnginesArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var describeUpdatableDataEngines = TencentcloudFunctions.getDlcDescribeUpdatableDataEngines(GetDlcDescribeUpdatableDataEnginesArgs.builder()
.dataEngineConfigCommand("UpdateSparkSQLLakefsPath")
.build());
}
}
variables:
describeUpdatableDataEngines:
fn::invoke:
function: tencentcloud:getDlcDescribeUpdatableDataEngines
arguments:
dataEngineConfigCommand: UpdateSparkSQLLakefsPath
Using getDlcDescribeUpdatableDataEngines
Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.
function getDlcDescribeUpdatableDataEngines(args: GetDlcDescribeUpdatableDataEnginesArgs, opts?: InvokeOptions): Promise<GetDlcDescribeUpdatableDataEnginesResult>
function getDlcDescribeUpdatableDataEnginesOutput(args: GetDlcDescribeUpdatableDataEnginesOutputArgs, opts?: InvokeOptions): Output<GetDlcDescribeUpdatableDataEnginesResult>
def get_dlc_describe_updatable_data_engines(data_engine_config_command: Optional[str] = None,
id: Optional[str] = None,
result_output_file: Optional[str] = None,
opts: Optional[InvokeOptions] = None) -> GetDlcDescribeUpdatableDataEnginesResult
def get_dlc_describe_updatable_data_engines_output(data_engine_config_command: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
result_output_file: Optional[pulumi.Input[str]] = None,
opts: Optional[InvokeOptions] = None) -> Output[GetDlcDescribeUpdatableDataEnginesResult]
func GetDlcDescribeUpdatableDataEngines(ctx *Context, args *GetDlcDescribeUpdatableDataEnginesArgs, opts ...InvokeOption) (*GetDlcDescribeUpdatableDataEnginesResult, error)
func GetDlcDescribeUpdatableDataEnginesOutput(ctx *Context, args *GetDlcDescribeUpdatableDataEnginesOutputArgs, opts ...InvokeOption) GetDlcDescribeUpdatableDataEnginesResultOutput
> Note: This function is named GetDlcDescribeUpdatableDataEngines in the Go SDK.
public static class GetDlcDescribeUpdatableDataEngines
{
public static Task<GetDlcDescribeUpdatableDataEnginesResult> InvokeAsync(GetDlcDescribeUpdatableDataEnginesArgs args, InvokeOptions? opts = null)
public static Output<GetDlcDescribeUpdatableDataEnginesResult> Invoke(GetDlcDescribeUpdatableDataEnginesInvokeArgs args, InvokeOptions? opts = null)
}
public static CompletableFuture<GetDlcDescribeUpdatableDataEnginesResult> getDlcDescribeUpdatableDataEngines(GetDlcDescribeUpdatableDataEnginesArgs args, InvokeOptions options)
public static Output<GetDlcDescribeUpdatableDataEnginesResult> getDlcDescribeUpdatableDataEngines(GetDlcDescribeUpdatableDataEnginesArgs args, InvokeOptions options)
fn::invoke:
function: tencentcloud:index/getDlcDescribeUpdatableDataEngines:getDlcDescribeUpdatableDataEngines
arguments:
# arguments dictionary
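As a brief illustration of the two forms, here is a minimal TypeScript sketch; the export names are illustrative and not part of the schema:

import * as pulumi from "@pulumi/pulumi";
import * as tencentcloud from "@pulumi/tencentcloud";

// Direct form: plain arguments, the result arrives as a Promise.
const directResult = tencentcloud.getDlcDescribeUpdatableDataEngines({
    dataEngineConfigCommand: "UpdateSparkSQLLakefsPath",
});
export const directEngineCount = pulumi
    .output(directResult)
    .apply(res => res.dataEngineBasicInfos.length);

// Output form: Input-wrapped arguments, Output-wrapped result.
const outputResult = tencentcloud.getDlcDescribeUpdatableDataEnginesOutput({
    dataEngineConfigCommand: "UpdateSparkSQLLakefsPath",
});
export const outputEngineCount = outputResult.dataEngineBasicInfos.apply(infos => infos.length);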
The following arguments are supported (names follow each language's convention, e.g. dataEngineConfigCommand in TypeScript and data_engine_config_command in Python):
- DataEngineConfigCommand (string) - Engine configuration operation command. UpdateSparkSQLLakefsPath updates the managed table path; UpdateSparkSQLResultPath updates the result bucket path.
- Id (string)
- ResultOutputFile (string) - Used to save results.
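For example, a small TypeScript sketch that queries the engines whose result bucket path can be updated and also saves the response locally; the file path shown is illustrative:

import * as tencentcloud from "@pulumi/tencentcloud";

// Query engines whose result bucket path can be updated; resultOutputFile
// saves the raw response to a local file (the path here is illustrative).
const resultPathEngines = tencentcloud.getDlcDescribeUpdatableDataEnginesOutput({
    dataEngineConfigCommand: "UpdateSparkSQLResultPath",
    resultOutputFile: "./updatable-data-engines.json",
});
export const resultPathEngineNames = resultPathEngines.dataEngineBasicInfos.apply(
    infos => infos.map(info => info.dataEngineName));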
getDlcDescribeUpdatableDataEngines Result
The following output properties are available:
- DataEngineBasicInfos (list of GetDlcDescribeUpdatableDataEnginesDataEngineBasicInfo) - Engine basic information.
- DataEngineConfigCommand (string)
- Id (string)
- ResultOutputFile (string)
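As a usage sketch in TypeScript, these properties can be surfaced as stack outputs (export names are illustrative):

import * as tencentcloud from "@pulumi/tencentcloud";

const updatable = tencentcloud.getDlcDescribeUpdatableDataEnginesOutput({
    dataEngineConfigCommand: "UpdateSparkSQLLakefsPath",
});

// Export the full list of engine basic information and the command echoed back.
export const updatableEngines = updatable.dataEngineBasicInfos;
export const appliedCommand = updatable.dataEngineConfigCommand;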
Supporting Types
GetDlcDescribeUpdatableDataEnginesDataEngineBasicInfo
(Property names and numeric types follow each language's convention, e.g. AppId float64 in Go, app_id float in Python, appId Number in YAML.)
- AppId (number) - User unique ID.
- CreateTime (number) - Create time.
- DataEngineId (string) - Engine unique ID.
- DataEngineName (string) - Engine name.
- DataEngineType (string) - Engine type. Valid values: PrestoSQL, SparkSQL, SparkBatch.
- Message (string) - Returned messages.
- State (number) - Engine state. Valid values: 0: Init, -1: Failed, -2: Deleted, 1: Pause, 2: Running, 3: ToBeDelete, 4: Deleting.
- UpdateTime (number) - Update time.
- UserUin (string) - User unique uin.
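To illustrate working with these fields, here is a TypeScript sketch that keeps only running SparkSQL engines (state 2 means Running per the list above); the export name is illustrative:

import * as tencentcloud from "@pulumi/tencentcloud";

const engines = tencentcloud.getDlcDescribeUpdatableDataEnginesOutput({
    dataEngineConfigCommand: "UpdateSparkSQLLakefsPath",
});

// Filter the basic-info records down to running SparkSQL engines and project
// a few identifying fields.
export const runningSparkSqlEngines = engines.dataEngineBasicInfos.apply(infos =>
    infos
        .filter(info => info.state === 2 && info.dataEngineType === "SparkSQL")
        .map(info => ({ id: info.dataEngineId, name: info.dataEngineName, owner: info.userUin })));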
Package Details
- Repository: tencentcloud tencentcloudstack/terraform-provider-tencentcloud
- License
- Notes: This Pulumi package is based on the tencentcloud Terraform Provider.