gcp.dataproc.SessionTemplate
A Dataproc Serverless session template defines the configuration settings for creating one or more Dataproc Serverless interactive sessions.
To get more information about SessionTemplate, see the Dataproc Serverless session templates documentation.
Example Usage
Dataproc Session Templates Jupyter
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const exampleSessionTemplatesJupyter = new gcp.dataproc.SessionTemplate("example_session_templates_jupyter", {
name: "projects/my-project-name/locations/us-central1/sessionTemplates/jupyter-session-template",
location: "us-central1",
labels: {
session_template_test: "terraform",
},
runtimeConfig: {
properties: {
"spark.dynamicAllocation.enabled": "false",
"spark.executor.instances": "2",
},
},
environmentConfig: {
executionConfig: {
subnetworkUri: "default",
ttl: "3600s",
networkTags: ["tag1"],
},
},
jupyterSession: {
kernel: "PYTHON",
displayName: "tf python kernel",
},
});
import pulumi
import pulumi_gcp as gcp
example_session_templates_jupyter = gcp.dataproc.SessionTemplate("example_session_templates_jupyter",
name="projects/my-project-name/locations/us-central1/sessionTemplates/jupyter-session-template",
location="us-central1",
labels={
"session_template_test": "terraform",
},
runtime_config={
"properties": {
"spark.dynamicAllocation.enabled": "false",
"spark.executor.instances": "2",
},
},
environment_config={
"execution_config": {
"subnetwork_uri": "default",
"ttl": "3600s",
"network_tags": ["tag1"],
},
},
jupyter_session={
"kernel": "PYTHON",
"display_name": "tf python kernel",
})
package main
import (
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/dataproc"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := dataproc.NewSessionTemplate(ctx, "example_session_templates_jupyter", &dataproc.SessionTemplateArgs{
Name: pulumi.String("projects/my-project-name/locations/us-central1/sessionTemplates/jupyter-session-template"),
Location: pulumi.String("us-central1"),
Labels: pulumi.StringMap{
"session_template_test": pulumi.String("terraform"),
},
RuntimeConfig: &dataproc.SessionTemplateRuntimeConfigArgs{
Properties: pulumi.StringMap{
"spark.dynamicAllocation.enabled": pulumi.String("false"),
"spark.executor.instances": pulumi.String("2"),
},
},
EnvironmentConfig: &dataproc.SessionTemplateEnvironmentConfigArgs{
ExecutionConfig: &dataproc.SessionTemplateEnvironmentConfigExecutionConfigArgs{
SubnetworkUri: pulumi.String("default"),
Ttl: pulumi.String("3600s"),
NetworkTags: pulumi.StringArray{
pulumi.String("tag1"),
},
},
},
JupyterSession: &dataproc.SessionTemplateJupyterSessionArgs{
Kernel: pulumi.String("PYTHON"),
DisplayName: pulumi.String("tf python kernel"),
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var exampleSessionTemplatesJupyter = new Gcp.Dataproc.SessionTemplate("example_session_templates_jupyter", new()
{
Name = "projects/my-project-name/locations/us-central1/sessionTemplates/jupyter-session-template",
Location = "us-central1",
Labels =
{
{ "session_template_test", "terraform" },
},
RuntimeConfig = new Gcp.Dataproc.Inputs.SessionTemplateRuntimeConfigArgs
{
Properties =
{
{ "spark.dynamicAllocation.enabled", "false" },
{ "spark.executor.instances", "2" },
},
},
EnvironmentConfig = new Gcp.Dataproc.Inputs.SessionTemplateEnvironmentConfigArgs
{
ExecutionConfig = new Gcp.Dataproc.Inputs.SessionTemplateEnvironmentConfigExecutionConfigArgs
{
SubnetworkUri = "default",
Ttl = "3600s",
NetworkTags = new[]
{
"tag1",
},
},
},
JupyterSession = new Gcp.Dataproc.Inputs.SessionTemplateJupyterSessionArgs
{
Kernel = "PYTHON",
DisplayName = "tf python kernel",
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.dataproc.SessionTemplate;
import com.pulumi.gcp.dataproc.SessionTemplateArgs;
import com.pulumi.gcp.dataproc.inputs.SessionTemplateRuntimeConfigArgs;
import com.pulumi.gcp.dataproc.inputs.SessionTemplateEnvironmentConfigArgs;
import com.pulumi.gcp.dataproc.inputs.SessionTemplateEnvironmentConfigExecutionConfigArgs;
import com.pulumi.gcp.dataproc.inputs.SessionTemplateJupyterSessionArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var exampleSessionTemplatesJupyter = new SessionTemplate("exampleSessionTemplatesJupyter", SessionTemplateArgs.builder()
.name("projects/my-project-name/locations/us-central1/sessionTemplates/jupyter-session-template")
.location("us-central1")
.labels(Map.of("session_template_test", "terraform"))
.runtimeConfig(SessionTemplateRuntimeConfigArgs.builder()
.properties(Map.ofEntries(
Map.entry("spark.dynamicAllocation.enabled", "false"),
Map.entry("spark.executor.instances", "2")
))
.build())
.environmentConfig(SessionTemplateEnvironmentConfigArgs.builder()
.executionConfig(SessionTemplateEnvironmentConfigExecutionConfigArgs.builder()
.subnetworkUri("default")
.ttl("3600s")
.networkTags("tag1")
.build())
.build())
.jupyterSession(SessionTemplateJupyterSessionArgs.builder()
.kernel("PYTHON")
.displayName("tf python kernel")
.build())
.build());
}
}
resources:
exampleSessionTemplatesJupyter:
type: gcp:dataproc:SessionTemplate
name: example_session_templates_jupyter
properties:
name: projects/my-project-name/locations/us-central1/sessionTemplates/jupyter-session-template
location: us-central1
labels:
session_template_test: terraform
runtimeConfig:
properties:
spark.dynamicAllocation.enabled: 'false'
spark.executor.instances: '2'
environmentConfig:
executionConfig:
subnetworkUri: default
ttl: 3600s
networkTags:
- tag1
jupyterSession:
kernel: PYTHON
displayName: tf python kernel
Dataproc Session Templates Jupyter Full
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const project = gcp.organizations.getProject({});
const gcsAccount = gcp.storage.getProjectServiceAccount({});
const bucket = new gcp.storage.Bucket("bucket", {
uniformBucketLevelAccess: true,
name: "dataproc-bucket",
location: "US",
forceDestroy: true,
});
const cryptoKeyMember1 = new gcp.kms.CryptoKeyIAMMember("crypto_key_member_1", {
cryptoKeyId: "example-key",
role: "roles/cloudkms.cryptoKeyEncrypterDecrypter",
member: project.then(project => `serviceAccount:service-${project.number}@dataproc-accounts.iam.gserviceaccount.com`),
});
const ms = new gcp.dataproc.MetastoreService("ms", {
serviceId: "jupyter-session-template",
location: "us-central1",
port: 9080,
tier: "DEVELOPER",
maintenanceWindow: {
hourOfDay: 2,
dayOfWeek: "SUNDAY",
},
hiveMetastoreConfig: {
version: "3.1.2",
},
networkConfig: {
consumers: [{
subnetwork: "projects/my-project-name/regions/us-central1/subnetworks/default",
}],
},
});
const basic = new gcp.dataproc.Cluster("basic", {
name: "jupyter-session-template",
region: "us-central1",
clusterConfig: {
softwareConfig: {
overrideProperties: {
"dataproc:dataproc.allow.zero.workers": "true",
"spark:spark.history.fs.logDirectory": pulumi.interpolate`gs://${bucket.name}/*/spark-job-history`,
},
},
gceClusterConfig: {
subnetwork: "default",
},
endpointConfig: {
enableHttpPortAccess: true,
},
masterConfig: {
numInstances: 1,
machineType: "e2-standard-2",
diskConfig: {
bootDiskSizeGb: 35,
},
},
metastoreConfig: {
dataprocMetastoreService: ms.name,
},
},
});
const dataprocSessionTemplatesJupyterFull = new gcp.dataproc.SessionTemplate("dataproc_session_templates_jupyter_full", {
name: "projects/my-project-name/locations/us-central1/sessionTemplates/jupyter-session-template",
location: "us-central1",
labels: {
session_template_test: "terraform",
},
runtimeConfig: {
properties: {
"spark.dynamicAllocation.enabled": "false",
"spark.executor.instances": "2",
},
version: "2.2",
containerImage: "us-docker.pkg.dev/my-project-name/s8s-spark-test-images/s8s-spark:latest",
},
environmentConfig: {
executionConfig: {
ttl: "3600s",
networkTags: ["tag1"],
kmsKey: "example-key",
subnetworkUri: "default",
serviceAccount: project.then(project => `${project.number}-compute@developer.gserviceaccount.com`),
stagingBucket: bucket.name,
},
peripheralsConfig: {
metastoreService: ms.name,
sparkHistoryServerConfig: {
dataprocCluster: basic.id,
},
},
},
jupyterSession: {
kernel: "PYTHON",
displayName: "tf python kernel",
},
}, {
dependsOn: [cryptoKeyMember1],
});
import pulumi
import pulumi_gcp as gcp
project = gcp.organizations.get_project()
gcs_account = gcp.storage.get_project_service_account()
bucket = gcp.storage.Bucket("bucket",
uniform_bucket_level_access=True,
name="dataproc-bucket",
location="US",
force_destroy=True)
crypto_key_member1 = gcp.kms.CryptoKeyIAMMember("crypto_key_member_1",
crypto_key_id="example-key",
role="roles/cloudkms.cryptoKeyEncrypterDecrypter",
member=f"serviceAccount:service-{project.number}@dataproc-accounts.iam.gserviceaccount.com")
ms = gcp.dataproc.MetastoreService("ms",
service_id="jupyter-session-template",
location="us-central1",
port=9080,
tier="DEVELOPER",
maintenance_window={
"hour_of_day": 2,
"day_of_week": "SUNDAY",
},
hive_metastore_config={
"version": "3.1.2",
},
network_config={
"consumers": [{
"subnetwork": "projects/my-project-name/regions/us-central1/subnetworks/default",
}],
})
basic = gcp.dataproc.Cluster("basic",
name="jupyter-session-template",
region="us-central1",
cluster_config={
"software_config": {
"override_properties": {
"dataproc:dataproc.allow.zero.workers": "true",
"spark:spark.history.fs.logDirectory": bucket.name.apply(lambda name: f"gs://{name}/*/spark-job-history"),
},
},
"gce_cluster_config": {
"subnetwork": "default",
},
"endpoint_config": {
"enable_http_port_access": True,
},
"master_config": {
"num_instances": 1,
"machine_type": "e2-standard-2",
"disk_config": {
"boot_disk_size_gb": 35,
},
},
"metastore_config": {
"dataproc_metastore_service": ms.name,
},
})
dataproc_session_templates_jupyter_full = gcp.dataproc.SessionTemplate("dataproc_session_templates_jupyter_full",
name="projects/my-project-name/locations/us-central1/sessionTemplates/jupyter-session-template",
location="us-central1",
labels={
"session_template_test": "terraform",
},
runtime_config={
"properties": {
"spark.dynamicAllocation.enabled": "false",
"spark.executor.instances": "2",
},
"version": "2.2",
"container_image": "us-docker.pkg.dev/my-project-name/s8s-spark-test-images/s8s-spark:latest",
},
environment_config={
"execution_config": {
"ttl": "3600s",
"network_tags": ["tag1"],
"kms_key": "example-key",
"subnetwork_uri": "default",
"service_account": f"{project.number}-compute@developer.gserviceaccount.com",
"staging_bucket": bucket.name,
},
"peripherals_config": {
"metastore_service": ms.name,
"spark_history_server_config": {
"dataproc_cluster": basic.id,
},
},
},
jupyter_session={
"kernel": "PYTHON",
"display_name": "tf python kernel",
},
opts = pulumi.ResourceOptions(depends_on=[crypto_key_member1]))
package main
import (
"fmt"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/dataproc"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/kms"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/organizations"
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/storage"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
project, err := organizations.LookupProject(ctx, &organizations.LookupProjectArgs{}, nil)
if err != nil {
return err
}
_, err = storage.GetProjectServiceAccount(ctx, &storage.GetProjectServiceAccountArgs{}, nil)
if err != nil {
return err
}
bucket, err := storage.NewBucket(ctx, "bucket", &storage.BucketArgs{
UniformBucketLevelAccess: pulumi.Bool(true),
Name: pulumi.String("dataproc-bucket"),
Location: pulumi.String("US"),
ForceDestroy: pulumi.Bool(true),
})
if err != nil {
return err
}
cryptoKeyMember1, err := kms.NewCryptoKeyIAMMember(ctx, "crypto_key_member_1", &kms.CryptoKeyIAMMemberArgs{
CryptoKeyId: pulumi.String("example-key"),
Role: pulumi.String("roles/cloudkms.cryptoKeyEncrypterDecrypter"),
Member: pulumi.Sprintf("serviceAccount:service-%v@dataproc-accounts.iam.gserviceaccount.com", project.Number),
})
if err != nil {
return err
}
ms, err := dataproc.NewMetastoreService(ctx, "ms", &dataproc.MetastoreServiceArgs{
ServiceId: pulumi.String("jupyter-session-template"),
Location: pulumi.String("us-central1"),
Port: pulumi.Int(9080),
Tier: pulumi.String("DEVELOPER"),
MaintenanceWindow: &dataproc.MetastoreServiceMaintenanceWindowArgs{
HourOfDay: pulumi.Int(2),
DayOfWeek: pulumi.String("SUNDAY"),
},
HiveMetastoreConfig: &dataproc.MetastoreServiceHiveMetastoreConfigArgs{
Version: pulumi.String("3.1.2"),
},
NetworkConfig: &dataproc.MetastoreServiceNetworkConfigArgs{
Consumers: dataproc.MetastoreServiceNetworkConfigConsumerArray{
&dataproc.MetastoreServiceNetworkConfigConsumerArgs{
Subnetwork: pulumi.String("projects/my-project-name/regions/us-central1/subnetworks/default"),
},
},
},
})
if err != nil {
return err
}
basic, err := dataproc.NewCluster(ctx, "basic", &dataproc.ClusterArgs{
Name: pulumi.String("jupyter-session-template"),
Region: pulumi.String("us-central1"),
ClusterConfig: &dataproc.ClusterClusterConfigArgs{
SoftwareConfig: &dataproc.ClusterClusterConfigSoftwareConfigArgs{
OverrideProperties: pulumi.StringMap{
"dataproc:dataproc.allow.zero.workers": pulumi.String("true"),
"spark:spark.history.fs.logDirectory": bucket.Name.ApplyT(func(name string) (string, error) {
return fmt.Sprintf("gs://%v/*/spark-job-history", name), nil
}).(pulumi.StringOutput),
},
},
GceClusterConfig: &dataproc.ClusterClusterConfigGceClusterConfigArgs{
Subnetwork: pulumi.String("default"),
},
EndpointConfig: &dataproc.ClusterClusterConfigEndpointConfigArgs{
EnableHttpPortAccess: pulumi.Bool(true),
},
MasterConfig: &dataproc.ClusterClusterConfigMasterConfigArgs{
NumInstances: pulumi.Int(1),
MachineType: pulumi.String("e2-standard-2"),
DiskConfig: &dataproc.ClusterClusterConfigMasterConfigDiskConfigArgs{
BootDiskSizeGb: pulumi.Int(35),
},
},
MetastoreConfig: &dataproc.ClusterClusterConfigMetastoreConfigArgs{
DataprocMetastoreService: ms.Name,
},
},
})
if err != nil {
return err
}
_, err = dataproc.NewSessionTemplate(ctx, "dataproc_session_templates_jupyter_full", &dataproc.SessionTemplateArgs{
Name: pulumi.String("projects/my-project-name/locations/us-central1/sessionTemplates/jupyter-session-template"),
Location: pulumi.String("us-central1"),
Labels: pulumi.StringMap{
"session_template_test": pulumi.String("terraform"),
},
RuntimeConfig: &dataproc.SessionTemplateRuntimeConfigArgs{
Properties: pulumi.StringMap{
"spark.dynamicAllocation.enabled": pulumi.String("false"),
"spark.executor.instances": pulumi.String("2"),
},
Version: pulumi.String("2.2"),
ContainerImage: pulumi.String("us-docker.pkg.dev/my-project-name/s8s-spark-test-images/s8s-spark:latest"),
},
EnvironmentConfig: &dataproc.SessionTemplateEnvironmentConfigArgs{
ExecutionConfig: &dataproc.SessionTemplateEnvironmentConfigExecutionConfigArgs{
Ttl: pulumi.String("3600s"),
NetworkTags: pulumi.StringArray{
pulumi.String("tag1"),
},
KmsKey: pulumi.String("example-key"),
SubnetworkUri: pulumi.String("default"),
ServiceAccount: pulumi.Sprintf("%v-compute@developer.gserviceaccount.com", project.Number),
StagingBucket: bucket.Name,
},
PeripheralsConfig: &dataproc.SessionTemplateEnvironmentConfigPeripheralsConfigArgs{
MetastoreService: ms.Name,
SparkHistoryServerConfig: &dataproc.SessionTemplateEnvironmentConfigPeripheralsConfigSparkHistoryServerConfigArgs{
DataprocCluster: basic.ID(),
},
},
},
JupyterSession: &dataproc.SessionTemplateJupyterSessionArgs{
Kernel: pulumi.String("PYTHON"),
DisplayName: pulumi.String("tf python kernel"),
},
}, pulumi.DependsOn([]pulumi.Resource{
cryptoKeyMember1,
}))
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var project = Gcp.Organizations.GetProject.Invoke();
var gcsAccount = Gcp.Storage.GetProjectServiceAccount.Invoke();
var bucket = new Gcp.Storage.Bucket("bucket", new()
{
UniformBucketLevelAccess = true,
Name = "dataproc-bucket",
Location = "US",
ForceDestroy = true,
});
var cryptoKeyMember1 = new Gcp.Kms.CryptoKeyIAMMember("crypto_key_member_1", new()
{
CryptoKeyId = "example-key",
Role = "roles/cloudkms.cryptoKeyEncrypterDecrypter",
Member = $"serviceAccount:service-{project.Apply(getProjectResult => getProjectResult.Number)}@dataproc-accounts.iam.gserviceaccount.com",
});
var ms = new Gcp.Dataproc.MetastoreService("ms", new()
{
ServiceId = "jupyter-session-template",
Location = "us-central1",
Port = 9080,
Tier = "DEVELOPER",
MaintenanceWindow = new Gcp.Dataproc.Inputs.MetastoreServiceMaintenanceWindowArgs
{
HourOfDay = 2,
DayOfWeek = "SUNDAY",
},
HiveMetastoreConfig = new Gcp.Dataproc.Inputs.MetastoreServiceHiveMetastoreConfigArgs
{
Version = "3.1.2",
},
NetworkConfig = new Gcp.Dataproc.Inputs.MetastoreServiceNetworkConfigArgs
{
Consumers = new[]
{
new Gcp.Dataproc.Inputs.MetastoreServiceNetworkConfigConsumerArgs
{
Subnetwork = "projects/my-project-name/regions/us-central1/subnetworks/default",
},
},
},
});
var basic = new Gcp.Dataproc.Cluster("basic", new()
{
Name = "jupyter-session-template",
Region = "us-central1",
ClusterConfig = new Gcp.Dataproc.Inputs.ClusterClusterConfigArgs
{
SoftwareConfig = new Gcp.Dataproc.Inputs.ClusterClusterConfigSoftwareConfigArgs
{
OverrideProperties =
{
{ "dataproc:dataproc.allow.zero.workers", "true" },
{ "spark:spark.history.fs.logDirectory", bucket.Name.Apply(name => $"gs://{name}/*/spark-job-history") },
},
},
GceClusterConfig = new Gcp.Dataproc.Inputs.ClusterClusterConfigGceClusterConfigArgs
{
Subnetwork = "default",
},
EndpointConfig = new Gcp.Dataproc.Inputs.ClusterClusterConfigEndpointConfigArgs
{
EnableHttpPortAccess = true,
},
MasterConfig = new Gcp.Dataproc.Inputs.ClusterClusterConfigMasterConfigArgs
{
NumInstances = 1,
MachineType = "e2-standard-2",
DiskConfig = new Gcp.Dataproc.Inputs.ClusterClusterConfigMasterConfigDiskConfigArgs
{
BootDiskSizeGb = 35,
},
},
MetastoreConfig = new Gcp.Dataproc.Inputs.ClusterClusterConfigMetastoreConfigArgs
{
DataprocMetastoreService = ms.Name,
},
},
});
var dataprocSessionTemplatesJupyterFull = new Gcp.Dataproc.SessionTemplate("dataproc_session_templates_jupyter_full", new()
{
Name = "projects/my-project-name/locations/us-central1/sessionTemplates/jupyter-session-template",
Location = "us-central1",
Labels =
{
{ "session_template_test", "terraform" },
},
RuntimeConfig = new Gcp.Dataproc.Inputs.SessionTemplateRuntimeConfigArgs
{
Properties =
{
{ "spark.dynamicAllocation.enabled", "false" },
{ "spark.executor.instances", "2" },
},
Version = "2.2",
ContainerImage = "us-docker.pkg.dev/my-project-name/s8s-spark-test-images/s8s-spark:latest",
},
EnvironmentConfig = new Gcp.Dataproc.Inputs.SessionTemplateEnvironmentConfigArgs
{
ExecutionConfig = new Gcp.Dataproc.Inputs.SessionTemplateEnvironmentConfigExecutionConfigArgs
{
Ttl = "3600s",
NetworkTags = new[]
{
"tag1",
},
KmsKey = "example-key",
SubnetworkUri = "default",
ServiceAccount = $"{project.Apply(getProjectResult => getProjectResult.Number)}-compute@developer.gserviceaccount.com",
StagingBucket = bucket.Name,
},
PeripheralsConfig = new Gcp.Dataproc.Inputs.SessionTemplateEnvironmentConfigPeripheralsConfigArgs
{
MetastoreService = ms.Name,
SparkHistoryServerConfig = new Gcp.Dataproc.Inputs.SessionTemplateEnvironmentConfigPeripheralsConfigSparkHistoryServerConfigArgs
{
DataprocCluster = basic.Id,
},
},
},
JupyterSession = new Gcp.Dataproc.Inputs.SessionTemplateJupyterSessionArgs
{
Kernel = "PYTHON",
DisplayName = "tf python kernel",
},
}, new CustomResourceOptions
{
DependsOn =
{
cryptoKeyMember1,
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.organizations.OrganizationsFunctions;
import com.pulumi.gcp.organizations.inputs.GetProjectArgs;
import com.pulumi.gcp.storage.StorageFunctions;
import com.pulumi.gcp.storage.inputs.GetProjectServiceAccountArgs;
import com.pulumi.gcp.storage.Bucket;
import com.pulumi.gcp.storage.BucketArgs;
import com.pulumi.gcp.kms.CryptoKeyIAMMember;
import com.pulumi.gcp.kms.CryptoKeyIAMMemberArgs;
import com.pulumi.gcp.dataproc.MetastoreService;
import com.pulumi.gcp.dataproc.MetastoreServiceArgs;
import com.pulumi.gcp.dataproc.inputs.MetastoreServiceMaintenanceWindowArgs;
import com.pulumi.gcp.dataproc.inputs.MetastoreServiceHiveMetastoreConfigArgs;
import com.pulumi.gcp.dataproc.inputs.MetastoreServiceNetworkConfigArgs;
import com.pulumi.gcp.dataproc.inputs.MetastoreServiceNetworkConfigConsumerArgs;
import com.pulumi.gcp.dataproc.Cluster;
import com.pulumi.gcp.dataproc.ClusterArgs;
import com.pulumi.gcp.dataproc.inputs.ClusterClusterConfigArgs;
import com.pulumi.gcp.dataproc.inputs.ClusterClusterConfigSoftwareConfigArgs;
import com.pulumi.gcp.dataproc.inputs.ClusterClusterConfigGceClusterConfigArgs;
import com.pulumi.gcp.dataproc.inputs.ClusterClusterConfigEndpointConfigArgs;
import com.pulumi.gcp.dataproc.inputs.ClusterClusterConfigMasterConfigArgs;
import com.pulumi.gcp.dataproc.inputs.ClusterClusterConfigMasterConfigDiskConfigArgs;
import com.pulumi.gcp.dataproc.inputs.ClusterClusterConfigMetastoreConfigArgs;
import com.pulumi.gcp.dataproc.SessionTemplate;
import com.pulumi.gcp.dataproc.SessionTemplateArgs;
import com.pulumi.gcp.dataproc.inputs.SessionTemplateRuntimeConfigArgs;
import com.pulumi.gcp.dataproc.inputs.SessionTemplateEnvironmentConfigArgs;
import com.pulumi.gcp.dataproc.inputs.SessionTemplateEnvironmentConfigExecutionConfigArgs;
import com.pulumi.gcp.dataproc.inputs.SessionTemplateEnvironmentConfigPeripheralsConfigArgs;
import com.pulumi.gcp.dataproc.inputs.SessionTemplateEnvironmentConfigPeripheralsConfigSparkHistoryServerConfigArgs;
import com.pulumi.gcp.dataproc.inputs.SessionTemplateJupyterSessionArgs;
import com.pulumi.resources.CustomResourceOptions;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var project = OrganizationsFunctions.getProject(GetProjectArgs.builder()
.build());
final var gcsAccount = StorageFunctions.getProjectServiceAccount(GetProjectServiceAccountArgs.builder()
.build());
var bucket = new Bucket("bucket", BucketArgs.builder()
.uniformBucketLevelAccess(true)
.name("dataproc-bucket")
.location("US")
.forceDestroy(true)
.build());
var cryptoKeyMember1 = new CryptoKeyIAMMember("cryptoKeyMember1", CryptoKeyIAMMemberArgs.builder()
.cryptoKeyId("example-key")
.role("roles/cloudkms.cryptoKeyEncrypterDecrypter")
.member(String.format("serviceAccount:service-%s@dataproc-accounts.iam.gserviceaccount.com", project.number()))
.build());
var ms = new MetastoreService("ms", MetastoreServiceArgs.builder()
.serviceId("jupyter-session-template")
.location("us-central1")
.port(9080)
.tier("DEVELOPER")
.maintenanceWindow(MetastoreServiceMaintenanceWindowArgs.builder()
.hourOfDay(2)
.dayOfWeek("SUNDAY")
.build())
.hiveMetastoreConfig(MetastoreServiceHiveMetastoreConfigArgs.builder()
.version("3.1.2")
.build())
.networkConfig(MetastoreServiceNetworkConfigArgs.builder()
.consumers(MetastoreServiceNetworkConfigConsumerArgs.builder()
.subnetwork("projects/my-project-name/regions/us-central1/subnetworks/default")
.build())
.build())
.build());
var basic = new Cluster("basic", ClusterArgs.builder()
.name("jupyter-session-template")
.region("us-central1")
.clusterConfig(ClusterClusterConfigArgs.builder()
.softwareConfig(ClusterClusterConfigSoftwareConfigArgs.builder()
.overrideProperties(Map.ofEntries(
Map.entry("dataproc:dataproc.allow.zero.workers", "true"),
Map.entry("spark:spark.history.fs.logDirectory", bucket.name().applyValue(_name -> String.format("gs://%s/*/spark-job-history", _name)))
))
.build())
.gceClusterConfig(ClusterClusterConfigGceClusterConfigArgs.builder()
.subnetwork("default")
.build())
.endpointConfig(ClusterClusterConfigEndpointConfigArgs.builder()
.enableHttpPortAccess(true)
.build())
.masterConfig(ClusterClusterConfigMasterConfigArgs.builder()
.numInstances(1)
.machineType("e2-standard-2")
.diskConfig(ClusterClusterConfigMasterConfigDiskConfigArgs.builder()
.bootDiskSizeGb(35)
.build())
.build())
.metastoreConfig(ClusterClusterConfigMetastoreConfigArgs.builder()
.dataprocMetastoreService(ms.name())
.build())
.build())
.build());
var dataprocSessionTemplatesJupyterFull = new SessionTemplate("dataprocSessionTemplatesJupyterFull", SessionTemplateArgs.builder()
.name("projects/my-project-name/locations/us-central1/sessionTemplates/jupyter-session-template")
.location("us-central1")
.labels(Map.of("session_template_test", "terraform"))
.runtimeConfig(SessionTemplateRuntimeConfigArgs.builder()
.properties(Map.ofEntries(
Map.entry("spark.dynamicAllocation.enabled", "false"),
Map.entry("spark.executor.instances", "2")
))
.version("2.2")
.containerImage("us-docker.pkg.dev/my-project-name/s8s-spark-test-images/s8s-spark:latest")
.build())
.environmentConfig(SessionTemplateEnvironmentConfigArgs.builder()
.executionConfig(SessionTemplateEnvironmentConfigExecutionConfigArgs.builder()
.ttl("3600s")
.networkTags("tag1")
.kmsKey("example-key")
.subnetworkUri("default")
.serviceAccount(String.format("%s-compute@developer.gserviceaccount.com", project.number()))
.stagingBucket(bucket.name())
.build())
.peripheralsConfig(SessionTemplateEnvironmentConfigPeripheralsConfigArgs.builder()
.metastoreService(ms.name())
.sparkHistoryServerConfig(SessionTemplateEnvironmentConfigPeripheralsConfigSparkHistoryServerConfigArgs.builder()
.dataprocCluster(basic.id())
.build())
.build())
.build())
.jupyterSession(SessionTemplateJupyterSessionArgs.builder()
.kernel("PYTHON")
.displayName("tf python kernel")
.build())
.build(), CustomResourceOptions.builder()
.dependsOn(cryptoKeyMember1)
.build());
}
}
resources:
dataprocSessionTemplatesJupyterFull:
type: gcp:dataproc:SessionTemplate
name: dataproc_session_templates_jupyter_full
properties:
name: projects/my-project-name/locations/us-central1/sessionTemplates/jupyter-session-template
location: us-central1
labels:
session_template_test: terraform
runtimeConfig:
properties:
spark.dynamicAllocation.enabled: 'false'
spark.executor.instances: '2'
version: '2.2'
containerImage: us-docker.pkg.dev/my-project-name/s8s-spark-test-images/s8s-spark:latest
environmentConfig:
executionConfig:
ttl: 3600s
networkTags:
- tag1
kmsKey: example-key
subnetworkUri: default
serviceAccount: ${project.number}-compute@developer.gserviceaccount.com
stagingBucket: ${bucket.name}
peripheralsConfig:
metastoreService: ${ms.name}
sparkHistoryServerConfig:
dataprocCluster: ${basic.id}
jupyterSession:
kernel: PYTHON
displayName: tf python kernel
options:
dependsOn:
- ${cryptoKeyMember1}
bucket:
type: gcp:storage:Bucket
properties:
uniformBucketLevelAccess: true
name: dataproc-bucket
location: US
forceDestroy: true
cryptoKeyMember1:
type: gcp:kms:CryptoKeyIAMMember
name: crypto_key_member_1
properties:
cryptoKeyId: example-key
role: roles/cloudkms.cryptoKeyEncrypterDecrypter
member: serviceAccount:service-${project.number}@dataproc-accounts.iam.gserviceaccount.com
basic:
type: gcp:dataproc:Cluster
properties:
name: jupyter-session-template
region: us-central1
clusterConfig:
softwareConfig:
overrideProperties:
dataproc:dataproc.allow.zero.workers: 'true'
spark:spark.history.fs.logDirectory: gs://${bucket.name}/*/spark-job-history
gceClusterConfig:
subnetwork: default
endpointConfig:
enableHttpPortAccess: true
masterConfig:
numInstances: 1
machineType: e2-standard-2
diskConfig:
bootDiskSizeGb: 35
metastoreConfig:
dataprocMetastoreService: ${ms.name}
ms:
type: gcp:dataproc:MetastoreService
properties:
serviceId: jupyter-session-template
location: us-central1
port: 9080
tier: DEVELOPER
maintenanceWindow:
hourOfDay: 2
dayOfWeek: SUNDAY
hiveMetastoreConfig:
version: 3.1.2
networkConfig:
consumers:
- subnetwork: projects/my-project-name/regions/us-central1/subnetworks/default
variables:
project:
fn::invoke:
function: gcp:organizations:getProject
arguments: {}
gcsAccount:
fn::invoke:
function: gcp:storage:getProjectServiceAccount
arguments: {}
Dataproc Session Templates Spark Connect
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const exampleSessionTemplatesSparkConnect = new gcp.dataproc.SessionTemplate("example_session_templates_spark_connect", {
name: "projects/my-project-name/locations/us-central1/sessionTemplates/sc-session-template",
location: "us-central1",
labels: {
session_template_test: "terraform",
},
runtimeConfig: {
properties: {
"spark.dynamicAllocation.enabled": "false",
"spark.executor.instances": "2",
},
},
environmentConfig: {
executionConfig: {
subnetworkUri: "default",
ttl: "3600s",
networkTags: ["tag1"],
},
},
});
import pulumi
import pulumi_gcp as gcp
example_session_templates_spark_connect = gcp.dataproc.SessionTemplate("example_session_templates_spark_connect",
name="projects/my-project-name/locations/us-central1/sessionTemplates/sc-session-template",
location="us-central1",
labels={
"session_template_test": "terraform",
},
runtime_config={
"properties": {
"spark.dynamicAllocation.enabled": "false",
"spark.executor.instances": "2",
},
},
environment_config={
"execution_config": {
"subnetwork_uri": "default",
"ttl": "3600s",
"network_tags": ["tag1"],
},
})
package main
import (
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/dataproc"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := dataproc.NewSessionTemplate(ctx, "example_session_templates_spark_connect", &dataproc.SessionTemplateArgs{
Name: pulumi.String("projects/my-project-name/locations/us-central1/sessionTemplates/sc-session-template"),
Location: pulumi.String("us-central1"),
Labels: pulumi.StringMap{
"session_template_test": pulumi.String("terraform"),
},
RuntimeConfig: &dataproc.SessionTemplateRuntimeConfigArgs{
Properties: pulumi.StringMap{
"spark.dynamicAllocation.enabled": pulumi.String("false"),
"spark.executor.instances": pulumi.String("2"),
},
},
EnvironmentConfig: &dataproc.SessionTemplateEnvironmentConfigArgs{
ExecutionConfig: &dataproc.SessionTemplateEnvironmentConfigExecutionConfigArgs{
SubnetworkUri: pulumi.String("default"),
Ttl: pulumi.String("3600s"),
NetworkTags: pulumi.StringArray{
pulumi.String("tag1"),
},
},
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var exampleSessionTemplatesSparkConnect = new Gcp.Dataproc.SessionTemplate("example_session_templates_spark_connect", new()
{
Name = "projects/my-project-name/locations/us-central1/sessionTemplates/sc-session-template",
Location = "us-central1",
Labels =
{
{ "session_template_test", "terraform" },
},
RuntimeConfig = new Gcp.Dataproc.Inputs.SessionTemplateRuntimeConfigArgs
{
Properties =
{
{ "spark.dynamicAllocation.enabled", "false" },
{ "spark.executor.instances", "2" },
},
},
EnvironmentConfig = new Gcp.Dataproc.Inputs.SessionTemplateEnvironmentConfigArgs
{
ExecutionConfig = new Gcp.Dataproc.Inputs.SessionTemplateEnvironmentConfigExecutionConfigArgs
{
SubnetworkUri = "default",
Ttl = "3600s",
NetworkTags = new[]
{
"tag1",
},
},
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.dataproc.SessionTemplate;
import com.pulumi.gcp.dataproc.SessionTemplateArgs;
import com.pulumi.gcp.dataproc.inputs.SessionTemplateRuntimeConfigArgs;
import com.pulumi.gcp.dataproc.inputs.SessionTemplateEnvironmentConfigArgs;
import com.pulumi.gcp.dataproc.inputs.SessionTemplateEnvironmentConfigExecutionConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var exampleSessionTemplatesSparkConnect = new SessionTemplate("exampleSessionTemplatesSparkConnect", SessionTemplateArgs.builder()
.name("projects/my-project-name/locations/us-central1/sessionTemplates/sc-session-template")
.location("us-central1")
.labels(Map.of("session_template_test", "terraform"))
.runtimeConfig(SessionTemplateRuntimeConfigArgs.builder()
.properties(Map.ofEntries(
Map.entry("spark.dynamicAllocation.enabled", "false"),
Map.entry("spark.executor.instances", "2")
))
.build())
.environmentConfig(SessionTemplateEnvironmentConfigArgs.builder()
.executionConfig(SessionTemplateEnvironmentConfigExecutionConfigArgs.builder()
.subnetworkUri("default")
.ttl("3600s")
.networkTags("tag1")
.build())
.build())
.build());
}
}
resources:
exampleSessionTemplatesSparkConnect:
type: gcp:dataproc:SessionTemplate
name: example_session_templates_spark_connect
properties:
name: projects/my-project-name/locations/us-central1/sessionTemplates/sc-session-template
location: us-central1
labels:
session_template_test: terraform
runtimeConfig:
properties:
spark.dynamicAllocation.enabled: 'false'
spark.executor.instances: '2'
environmentConfig:
executionConfig:
subnetworkUri: default
ttl: 3600s
networkTags:
- tag1
Create SessionTemplate Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new SessionTemplate(name: string, args?: SessionTemplateArgs, opts?: CustomResourceOptions);
@overload
def SessionTemplate(resource_name: str,
args: Optional[SessionTemplateArgs] = None,
opts: Optional[ResourceOptions] = None)
@overload
def SessionTemplate(resource_name: str,
opts: Optional[ResourceOptions] = None,
environment_config: Optional[SessionTemplateEnvironmentConfigArgs] = None,
jupyter_session: Optional[SessionTemplateJupyterSessionArgs] = None,
labels: Optional[Mapping[str, str]] = None,
location: Optional[str] = None,
name: Optional[str] = None,
project: Optional[str] = None,
runtime_config: Optional[SessionTemplateRuntimeConfigArgs] = None,
spark_connect_session: Optional[SessionTemplateSparkConnectSessionArgs] = None)
func NewSessionTemplate(ctx *Context, name string, args *SessionTemplateArgs, opts ...ResourceOption) (*SessionTemplate, error)
public SessionTemplate(string name, SessionTemplateArgs? args = null, CustomResourceOptions? opts = null)
public SessionTemplate(String name, SessionTemplateArgs args)
public SessionTemplate(String name, SessionTemplateArgs args, CustomResourceOptions options)
type: gcp:dataproc:SessionTemplate
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args SessionTemplateArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args SessionTemplateArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args SessionTemplateArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args SessionTemplateArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args SessionTemplateArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var sessionTemplateResource = new Gcp.Dataproc.SessionTemplate("sessionTemplateResource", new()
{
EnvironmentConfig = new Gcp.Dataproc.Inputs.SessionTemplateEnvironmentConfigArgs
{
ExecutionConfig = new Gcp.Dataproc.Inputs.SessionTemplateEnvironmentConfigExecutionConfigArgs
{
KmsKey = "string",
NetworkTags = new[]
{
"string",
},
ServiceAccount = "string",
StagingBucket = "string",
SubnetworkUri = "string",
Ttl = "string",
},
PeripheralsConfig = new Gcp.Dataproc.Inputs.SessionTemplateEnvironmentConfigPeripheralsConfigArgs
{
MetastoreService = "string",
SparkHistoryServerConfig = new Gcp.Dataproc.Inputs.SessionTemplateEnvironmentConfigPeripheralsConfigSparkHistoryServerConfigArgs
{
DataprocCluster = "string",
},
},
},
JupyterSession = new Gcp.Dataproc.Inputs.SessionTemplateJupyterSessionArgs
{
DisplayName = "string",
Kernel = "string",
},
Labels =
{
{ "string", "string" },
},
Location = "string",
Name = "string",
Project = "string",
RuntimeConfig = new Gcp.Dataproc.Inputs.SessionTemplateRuntimeConfigArgs
{
ContainerImage = "string",
EffectiveProperties =
{
{ "string", "string" },
},
Properties =
{
{ "string", "string" },
},
Version = "string",
},
SparkConnectSession = null,
});
example, err := dataproc.NewSessionTemplate(ctx, "sessionTemplateResource", &dataproc.SessionTemplateArgs{
EnvironmentConfig: &dataproc.SessionTemplateEnvironmentConfigArgs{
ExecutionConfig: &dataproc.SessionTemplateEnvironmentConfigExecutionConfigArgs{
KmsKey: pulumi.String("string"),
NetworkTags: pulumi.StringArray{
pulumi.String("string"),
},
ServiceAccount: pulumi.String("string"),
StagingBucket: pulumi.String("string"),
SubnetworkUri: pulumi.String("string"),
Ttl: pulumi.String("string"),
},
PeripheralsConfig: &dataproc.SessionTemplateEnvironmentConfigPeripheralsConfigArgs{
MetastoreService: pulumi.String("string"),
SparkHistoryServerConfig: &dataproc.SessionTemplateEnvironmentConfigPeripheralsConfigSparkHistoryServerConfigArgs{
DataprocCluster: pulumi.String("string"),
},
},
},
JupyterSession: &dataproc.SessionTemplateJupyterSessionArgs{
DisplayName: pulumi.String("string"),
Kernel: pulumi.String("string"),
},
Labels: pulumi.StringMap{
"string": pulumi.String("string"),
},
Location: pulumi.String("string"),
Name: pulumi.String("string"),
Project: pulumi.String("string"),
RuntimeConfig: &dataproc.SessionTemplateRuntimeConfigArgs{
ContainerImage: pulumi.String("string"),
EffectiveProperties: pulumi.StringMap{
"string": pulumi.String("string"),
},
Properties: pulumi.StringMap{
"string": pulumi.String("string"),
},
Version: pulumi.String("string"),
},
SparkConnectSession: &dataproc.SessionTemplateSparkConnectSessionArgs{},
})
var sessionTemplateResource = new SessionTemplate("sessionTemplateResource", SessionTemplateArgs.builder()
.environmentConfig(SessionTemplateEnvironmentConfigArgs.builder()
.executionConfig(SessionTemplateEnvironmentConfigExecutionConfigArgs.builder()
.kmsKey("string")
.networkTags("string")
.serviceAccount("string")
.stagingBucket("string")
.subnetworkUri("string")
.ttl("string")
.build())
.peripheralsConfig(SessionTemplateEnvironmentConfigPeripheralsConfigArgs.builder()
.metastoreService("string")
.sparkHistoryServerConfig(SessionTemplateEnvironmentConfigPeripheralsConfigSparkHistoryServerConfigArgs.builder()
.dataprocCluster("string")
.build())
.build())
.build())
.jupyterSession(SessionTemplateJupyterSessionArgs.builder()
.displayName("string")
.kernel("string")
.build())
.labels(Map.of("string", "string"))
.location("string")
.name("string")
.project("string")
.runtimeConfig(SessionTemplateRuntimeConfigArgs.builder()
.containerImage("string")
.effectiveProperties(Map.of("string", "string"))
.properties(Map.of("string", "string"))
.version("string")
.build())
.sparkConnectSession(SessionTemplateSparkConnectSessionArgs.builder()
.build())
.build());
session_template_resource = gcp.dataproc.SessionTemplate("sessionTemplateResource",
environment_config={
"execution_config": {
"kms_key": "string",
"network_tags": ["string"],
"service_account": "string",
"staging_bucket": "string",
"subnetwork_uri": "string",
"ttl": "string",
},
"peripherals_config": {
"metastore_service": "string",
"spark_history_server_config": {
"dataproc_cluster": "string",
},
},
},
jupyter_session={
"display_name": "string",
"kernel": "string",
},
labels={
"string": "string",
},
location="string",
name="string",
project="string",
runtime_config={
"container_image": "string",
"effective_properties": {
"string": "string",
},
"properties": {
"string": "string",
},
"version": "string",
},
spark_connect_session={})
const sessionTemplateResource = new gcp.dataproc.SessionTemplate("sessionTemplateResource", {
environmentConfig: {
executionConfig: {
kmsKey: "string",
networkTags: ["string"],
serviceAccount: "string",
stagingBucket: "string",
subnetworkUri: "string",
ttl: "string",
},
peripheralsConfig: {
metastoreService: "string",
sparkHistoryServerConfig: {
dataprocCluster: "string",
},
},
},
jupyterSession: {
displayName: "string",
kernel: "string",
},
labels: {
string: "string",
},
location: "string",
name: "string",
project: "string",
runtimeConfig: {
containerImage: "string",
effectiveProperties: {
string: "string",
},
properties: {
string: "string",
},
version: "string",
},
sparkConnectSession: {},
});
type: gcp:dataproc:SessionTemplate
properties:
environmentConfig:
executionConfig:
kmsKey: string
networkTags:
- string
serviceAccount: string
stagingBucket: string
subnetworkUri: string
ttl: string
peripheralsConfig:
metastoreService: string
sparkHistoryServerConfig:
dataprocCluster: string
jupyterSession:
displayName: string
kernel: string
labels:
string: string
location: string
name: string
project: string
runtimeConfig:
containerImage: string
effectiveProperties:
string: string
properties:
string: string
version: string
sparkConnectSession: {}
SessionTemplate Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
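For example, here is a minimal sketch of both styles side by side (the resource names t1 and t2 are placeholders chosen for illustration):
import pulumi_gcp as gcp
# Dictionary-literal style, as used in the Python examples above.
t1 = gcp.dataproc.SessionTemplate("t1",
    location="us-central1",
    runtime_config={
        "properties": {"spark.executor.instances": "2"},
    })
# Equivalent argument-class style.
t2 = gcp.dataproc.SessionTemplate("t2",
    location="us-central1",
    runtime_config=gcp.dataproc.SessionTemplateRuntimeConfigArgs(
        properties={"spark.executor.instances": "2"},
    ))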
The SessionTemplate resource accepts the following input properties:
- EnvironmentConfig SessionTemplateEnvironmentConfig
- Environment configuration for the session execution. Structure is documented below.
- JupyterSession SessionTemplateJupyterSession
- Jupyter configuration for an interactive session. Structure is documented below.
- Labels Dictionary<string, string>
- The labels to associate with this session template. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- Location string
- The location in which the session template will be created.
- Name string
- The resource name of the session template in the following format: projects/{project}/locations/{location}/sessionTemplates/{template_id}
- Project string
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- RuntimeConfig SessionTemplateRuntimeConfig
- Runtime configuration for the session template. Structure is documented below.
- SparkConnectSession SessionTemplateSparkConnectSession
- Spark Connect configuration for an interactive session.
- EnvironmentConfig SessionTemplateEnvironmentConfigArgs
- Environment configuration for the session execution. Structure is documented below.
- JupyterSession SessionTemplateJupyterSessionArgs
- Jupyter configuration for an interactive session. Structure is documented below.
- Labels map[string]string
- The labels to associate with this session template. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- Location string
- The location in which the session template will be created.
- Name string
- The resource name of the session template in the following format: projects/{project}/locations/{location}/sessionTemplates/{template_id}
- Project string
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- RuntimeConfig SessionTemplateRuntimeConfigArgs
- Runtime configuration for the session template. Structure is documented below.
- SparkConnectSession SessionTemplateSparkConnectSessionArgs
- Spark Connect configuration for an interactive session.
- environmentConfig SessionTemplateEnvironmentConfig
- Environment configuration for the session execution. Structure is documented below.
- jupyterSession SessionTemplateJupyterSession
- Jupyter configuration for an interactive session. Structure is documented below.
- labels Map<String,String>
- The labels to associate with this session template. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- location String
- The location in which the session template will be created.
- name String
- The resource name of the session template in the following format: projects/{project}/locations/{location}/sessionTemplates/{template_id}
- project String
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- runtimeConfig SessionTemplateRuntimeConfig
- Runtime configuration for the session template. Structure is documented below.
- sparkConnectSession SessionTemplateSparkConnectSession
- Spark Connect configuration for an interactive session.
- environmentConfig SessionTemplateEnvironmentConfig
- Environment configuration for the session execution. Structure is documented below.
- jupyterSession SessionTemplateJupyterSession
- Jupyter configuration for an interactive session. Structure is documented below.
- labels {[key: string]: string}
- The labels to associate with this session template. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- location string
- The location in which the session template will be created.
- name string
- The resource name of the session template in the following format: projects/{project}/locations/{location}/sessionTemplates/{template_id}
- project string
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- runtimeConfig SessionTemplateRuntimeConfig
- Runtime configuration for the session template. Structure is documented below.
- sparkConnectSession SessionTemplateSparkConnectSession
- Spark Connect configuration for an interactive session.
- environment_config SessionTemplateEnvironmentConfigArgs
- Environment configuration for the session execution. Structure is documented below.
- jupyter_session SessionTemplateJupyterSessionArgs
- Jupyter configuration for an interactive session. Structure is documented below.
- labels Mapping[str, str]
- The labels to associate with this session template. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- location str
- The location in which the session template will be created.
- name str
- The resource name of the session template in the following format: projects/{project}/locations/{location}/sessionTemplates/{template_id}
- project str
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- runtime_config SessionTemplateRuntimeConfigArgs
- Runtime configuration for the session template. Structure is documented below.
- spark_connect_session SessionTemplateSparkConnectSessionArgs
- Spark Connect configuration for an interactive session.
- environmentConfig Property Map
- Environment configuration for the session execution. Structure is documented below.
- jupyterSession Property Map
- Jupyter configuration for an interactive session. Structure is documented below.
- labels Map<String>
- The labels to associate with this session template. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- location String
- The location in which the session template will be created.
- name String
- The resource name of the session template in the following format: projects/{project}/locations/{location}/sessionTemplates/{template_id}
- project String
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- runtimeConfig Property Map
- Runtime configuration for the session template. Structure is documented below.
- sparkConnectSession Property Map
- Spark Connect configuration for an interactive session.
Outputs
All input properties are implicitly available as output properties. Additionally, the SessionTemplate resource produces the following output properties:
- CreateTime string
- The time when the session template was created.
- Creator string
- The email address of the user who created the session template.
- EffectiveLabels Dictionary<string, string>
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- Id string
- The provider-assigned unique ID for this managed resource.
- PulumiLabels Dictionary<string, string>
- The combination of labels configured directly on the resource and default labels configured on the provider.
- UpdateTime string
- The time when the session template was updated.
- Uuid string
- A session template UUID (Unique Universal Identifier). The service generates this value when it creates the session template.
- CreateTime string
- The time when the session template was created.
- Creator string
- The email address of the user who created the session template.
- EffectiveLabels map[string]string
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- Id string
- The provider-assigned unique ID for this managed resource.
- PulumiLabels map[string]string
- The combination of labels configured directly on the resource and default labels configured on the provider.
- UpdateTime string
- The time when the session template was updated.
- Uuid string
- A session template UUID (Unique Universal Identifier). The service generates this value when it creates the session template.
- createTime String
- The time when the session template was created.
- creator String
- The email address of the user who created the session template.
- effectiveLabels Map<String,String>
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- id String
- The provider-assigned unique ID for this managed resource.
- pulumiLabels Map<String,String>
- The combination of labels configured directly on the resource and default labels configured on the provider.
- updateTime String
- The time when the session template was updated.
- uuid String
- A session template UUID (Unique Universal Identifier). The service generates this value when it creates the session template.
- createTime string
- The time when the session template was created.
- creator string
- The email address of the user who created the session template.
- effectiveLabels {[key: string]: string}
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- id string
- The provider-assigned unique ID for this managed resource.
- pulumiLabels {[key: string]: string}
- The combination of labels configured directly on the resource and default labels configured on the provider.
- updateTime string
- The time when the session template was updated.
- uuid string
- A session template UUID (Unique Universal Identifier). The service generates this value when it creates the session template.
- create_time str
- The time when the session template was created.
- creator str
- The email address of the user who created the session template.
- effective_labels Mapping[str, str]
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- id str
- The provider-assigned unique ID for this managed resource.
- pulumi_labels Mapping[str, str]
- The combination of labels configured directly on the resource and default labels configured on the provider.
- update_time str
- The time when the session template was updated.
- uuid str
- A session template UUID (Unique Universal Identifier). The service generates this value when it creates the session template.
- createTime String
- The time when the session template was created.
- creator String
- The email address of the user who created the session template.
- effectiveLabels Map<String>
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- id String
- The provider-assigned unique ID for this managed resource.
- pulumiLabels Map<String>
- The combination of labels configured directly on the resource and default labels configured on the provider.
- updateTime String
- The time when the session template was updated.
- uuid String
- A session template UUID (Unique Universal Identifier). The service generates this value when it creates the session template.
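As a minimal sketch of consuming these outputs, the Python program from the first example above could export the server-generated fields (reusing its example_session_templates_jupyter resource):
import pulumi
# Server-generated output properties become available once the template is created.
pulumi.export("template_uuid", example_session_templates_jupyter.uuid)
pulumi.export("template_create_time", example_session_templates_jupyter.create_time)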
Look up Existing SessionTemplate Resource
Get an existing SessionTemplate resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: SessionTemplateState, opts?: CustomResourceOptions): SessionTemplate
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
create_time: Optional[str] = None,
creator: Optional[str] = None,
effective_labels: Optional[Mapping[str, str]] = None,
environment_config: Optional[SessionTemplateEnvironmentConfigArgs] = None,
jupyter_session: Optional[SessionTemplateJupyterSessionArgs] = None,
labels: Optional[Mapping[str, str]] = None,
location: Optional[str] = None,
name: Optional[str] = None,
project: Optional[str] = None,
pulumi_labels: Optional[Mapping[str, str]] = None,
runtime_config: Optional[SessionTemplateRuntimeConfigArgs] = None,
spark_connect_session: Optional[SessionTemplateSparkConnectSessionArgs] = None,
update_time: Optional[str] = None,
uuid: Optional[str] = None) -> SessionTemplate
func GetSessionTemplate(ctx *Context, name string, id IDInput, state *SessionTemplateState, opts ...ResourceOption) (*SessionTemplate, error)
public static SessionTemplate Get(string name, Input<string> id, SessionTemplateState? state, CustomResourceOptions? opts = null)
public static SessionTemplate get(String name, Output<String> id, SessionTemplateState state, CustomResourceOptions options)
resources:
  _:
    type: gcp:dataproc:SessionTemplate
    get:
      id: ${id}
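For instance, a minimal Python sketch of such a lookup, assuming the ID is the template's full resource name:
import pulumi_gcp as gcp
# Read the state of an existing session template by ID instead of creating a new one.
existing = gcp.dataproc.SessionTemplate.get(
    "existing-jupyter-template",
    "projects/my-project-name/locations/us-central1/sessionTemplates/jupyter-session-template")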
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- Create
Time string - The time when the session template was created.
- Creator string
- The email address of the user who created the session template.
- Effective
Labels Dictionary<string, string> - All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- Environment
Config SessionTemplate Environment Config - Environment configuration for the session execution. Structure is documented below.
- Jupyter
Session SessionTemplate Jupyter Session - Jupyter configuration for an interactive session. Structure is documented below.
- Labels Dictionary<string, string>
The labels to associate with this session template.
Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field
effective_labels
for all of the labels present on the resource.
- Location string
- The location in which the session template will be created.
- Name string
- The resource name of the session template in the following format:
projects/{project}/locations/{location}/sessionTemplates/{template_id}
- Project string
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- PulumiLabels Dictionary<string, string>
- The combination of labels configured directly on the resource and default labels configured on the provider.
- RuntimeConfig SessionTemplateRuntimeConfig
- Runtime configuration for the session template. Structure is documented below.
- SparkConnectSession SessionTemplateSparkConnectSession
- Spark connect configuration for an interactive session.
- UpdateTime string
- The time when the session template was updated.
- Uuid string
- A session template UUID (Unique Universal Identifier). The service generates this value when it creates the session template.
- CreateTime string
- The time when the session template was created.
- Creator string
- The email address of the user who created the session template.
- EffectiveLabels map[string]string
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- EnvironmentConfig SessionTemplateEnvironmentConfigArgs
- Environment configuration for the session execution. Structure is documented below.
- JupyterSession SessionTemplateJupyterSessionArgs
- Jupyter configuration for an interactive session. Structure is documented below.
- Labels map[string]string
The labels to associate with this session template.
Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field
effective_labels
for all of the labels present on the resource.
- Location string
- The location in which the session template will be created.
- Name string
- The resource name of the session template in the following format:
projects/{project}/locations/{location}/sessionTemplates/{template_id}
- Project string
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- PulumiLabels map[string]string
- The combination of labels configured directly on the resource and default labels configured on the provider.
- RuntimeConfig SessionTemplateRuntimeConfigArgs
- Runtime configuration for the session template. Structure is documented below.
- SparkConnectSession SessionTemplateSparkConnectSessionArgs
- Spark connect configuration for an interactive session.
- UpdateTime string
- The time when the session template was updated.
- Uuid string
- A session template UUID (Unique Universal Identifier). The service generates this value when it creates the session template.
- createTime String
- The time when the session template was created.
- creator String
- The email address of the user who created the session template.
- effectiveLabels Map<String,String>
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- environmentConfig SessionTemplateEnvironmentConfig
- Environment configuration for the session execution. Structure is documented below.
- jupyterSession SessionTemplateJupyterSession
- Jupyter configuration for an interactive session. Structure is documented below.
- labels Map<String,String>
The labels to associate with this session template.
Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field
effective_labels
for all of the labels present on the resource.
- location String
- The location in which the session template will be created.
- name String
- The resource name of the session template in the following format:
projects/{project}/locations/{location}/sessionTemplates/{template_id}
- project String
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- pulumiLabels Map<String,String>
- The combination of labels configured directly on the resource and default labels configured on the provider.
- runtimeConfig SessionTemplateRuntimeConfig
- Runtime configuration for the session template. Structure is documented below.
- sparkConnectSession SessionTemplateSparkConnectSession
- Spark connect configuration for an interactive session.
- updateTime String
- The time when the session template was updated.
- uuid String
- A session template UUID (Unique Universal Identifier). The service generates this value when it creates the session template.
- createTime string
- The time when the session template was created.
- creator string
- The email address of the user who created the session template.
- effectiveLabels {[key: string]: string}
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- environmentConfig SessionTemplateEnvironmentConfig
- Environment configuration for the session execution. Structure is documented below.
- jupyterSession SessionTemplateJupyterSession
- Jupyter configuration for an interactive session. Structure is documented below.
- labels {[key: string]: string}
The labels to associate with this session template.
Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field
effective_labels
for all of the labels present on the resource.
- location string
- The location in which the session template will be created.
- name string
- The resource name of the session template in the following format:
projects/{project}/locations/{location}/sessionTemplates/{template_id}
- project string
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- pulumiLabels {[key: string]: string}
- The combination of labels configured directly on the resource and default labels configured on the provider.
- runtimeConfig SessionTemplateRuntimeConfig
- Runtime configuration for the session template. Structure is documented below.
- sparkConnectSession SessionTemplateSparkConnectSession
- Spark connect configuration for an interactive session.
- updateTime string
- The time when the session template was updated.
- uuid string
- A session template UUID (Unique Universal Identifier). The service generates this value when it creates the session template.
- create_time str
- The time when the session template was created.
- creator str
- The email address of the user who created the session template.
- effective_labels Mapping[str, str]
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- environment_config SessionTemplateEnvironmentConfigArgs
- Environment configuration for the session execution. Structure is documented below.
- jupyter_session SessionTemplateJupyterSessionArgs
- Jupyter configuration for an interactive session. Structure is documented below.
- labels Mapping[str, str]
The labels to associate with this session template.
Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field
effective_labels
for all of the labels present on the resource.
- location str
- The location in which the session template will be created.
- name str
- The resource name of the session template in the following format:
projects/{project}/locations/{location}/sessionTemplates/{template_id}
- project str
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- pulumi_labels Mapping[str, str]
- The combination of labels configured directly on the resource and default labels configured on the provider.
- runtime_config SessionTemplateRuntimeConfigArgs
- Runtime configuration for the session template. Structure is documented below.
- spark_connect_session SessionTemplateSparkConnectSessionArgs
- Spark connect configuration for an interactive session.
- update_time str
- The time when the session template was updated.
- uuid str
- A session template UUID (Unique Universal Identifier). The service generates this value when it creates the session template.
- createTime String
- The time when the session template was created.
- creator String
- The email address of the user who created the session template.
- effectiveLabels Map<String>
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- environmentConfig Property Map
- Environment configuration for the session execution. Structure is documented below.
- jupyterSession Property Map
- Jupyter configuration for an interactive session. Structure is documented below.
- labels Map<String>
The labels to associate with this session template.
Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field
effective_labels
for all of the labels present on the resource.
- location String
- The location in which the session template will be created.
- name String
- The resource name of the session template in the following format:
projects/{project}/locations/{location}/sessionTemplates/{template_id}
- project String
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- pulumiLabels Map<String>
- The combination of labels configured directly on the resource and default labels configured on the provider.
- runtimeConfig Property Map
- Runtime configuration for the session template. Structure is documented below.
- sparkConnectSession Property Map
- Spark connect configuration for an interactive session.
- updateTime String
- The time when the session template was updated.
- uuid String
- A session template UUID (Unique Universal Identifier). The service generates this value when it creates the session template.
Supporting Types
SessionTemplateEnvironmentConfig, SessionTemplateEnvironmentConfigArgs
- ExecutionConfig SessionTemplateEnvironmentConfigExecutionConfig
- Execution configuration for a workload. Structure is documented below.
- PeripheralsConfig SessionTemplateEnvironmentConfigPeripheralsConfig
- Peripherals configuration that the workload has access to. Structure is documented below.
- ExecutionConfig SessionTemplateEnvironmentConfigExecutionConfig
- Execution configuration for a workload. Structure is documented below.
- PeripheralsConfig SessionTemplateEnvironmentConfigPeripheralsConfig
- Peripherals configuration that the workload has access to. Structure is documented below.
- executionConfig SessionTemplateEnvironmentConfigExecutionConfig
- Execution configuration for a workload. Structure is documented below.
- peripheralsConfig SessionTemplateEnvironmentConfigPeripheralsConfig
- Peripherals configuration that the workload has access to. Structure is documented below.
- executionConfig SessionTemplateEnvironmentConfigExecutionConfig
- Execution configuration for a workload. Structure is documented below.
- peripheralsConfig SessionTemplateEnvironmentConfigPeripheralsConfig
- Peripherals configuration that the workload has access to. Structure is documented below.
- execution_config SessionTemplateEnvironmentConfigExecutionConfig
- Execution configuration for a workload. Structure is documented below.
- peripherals_config SessionTemplateEnvironmentConfigPeripheralsConfig
- Peripherals configuration that the workload has access to. Structure is documented below.
- executionConfig Property Map
- Execution configuration for a workload. Structure is documented below.
- peripheralsConfig Property Map
- Peripherals configuration that the workload has access to. Structure is documented below.
SessionTemplateEnvironmentConfigExecutionConfig, SessionTemplateEnvironmentConfigExecutionConfigArgs
- KmsKey string
- The Cloud KMS key to use for encryption.
- NetworkTags List<string>
- Tags used for network traffic control.
- ServiceAccount string
- Service account used to execute the workload.
- StagingBucket string
- A Cloud Storage bucket used to stage workload dependencies, config files, and store workload output and other ephemeral data, such as Spark history files. If you do not specify a staging bucket, Cloud Dataproc will determine a Cloud Storage location according to the region where your workload is running, and then create and manage project-level, per-location staging and temporary buckets. This field requires a Cloud Storage bucket name, not a gs://... URI to a Cloud Storage bucket.
- SubnetworkUri string
- Subnetwork configuration for workload execution.
- Ttl string
- The duration after which the workload will be terminated. When the workload exceeds this duration, it will be unconditionally terminated without waiting for ongoing work to finish. If ttl is not specified for a session workload, the workload will be allowed to run until it exits naturally (or run forever without exiting). If ttl is not specified for an interactive session, it defaults to 24 hours. If ttl is not specified for a batch that uses 2.1+ runtime version, it defaults to 4 hours. Minimum value is 10 minutes; maximum value is 14 days. If both ttl and idleTtl are specified (for an interactive session), the conditions are treated as OR conditions: the workload will be terminated when it has been idle for idleTtl or when ttl has been exceeded, whichever occurs first.
- KmsKey string
- The Cloud KMS key to use for encryption.
- NetworkTags []string
- Tags used for network traffic control.
- ServiceAccount string
- Service account used to execute the workload.
- StagingBucket string
- A Cloud Storage bucket used to stage workload dependencies, config files, and store workload output and other ephemeral data, such as Spark history files. If you do not specify a staging bucket, Cloud Dataproc will determine a Cloud Storage location according to the region where your workload is running, and then create and manage project-level, per-location staging and temporary buckets. This field requires a Cloud Storage bucket name, not a gs://... URI to a Cloud Storage bucket.
- SubnetworkUri string
- Subnetwork configuration for workload execution.
- Ttl string
- The duration after which the workload will be terminated. When the workload exceeds this duration, it will be unconditionally terminated without waiting for ongoing work to finish. If ttl is not specified for a session workload, the workload will be allowed to run until it exits naturally (or run forever without exiting). If ttl is not specified for an interactive session, it defaults to 24 hours. If ttl is not specified for a batch that uses 2.1+ runtime version, it defaults to 4 hours. Minimum value is 10 minutes; maximum value is 14 days. If both ttl and idleTtl are specified (for an interactive session), the conditions are treated as OR conditions: the workload will be terminated when it has been idle for idleTtl or when ttl has been exceeded, whichever occurs first.
- kmsKey String
- The Cloud KMS key to use for encryption.
- networkTags List<String>
- Tags used for network traffic control.
- serviceAccount String
- Service account used to execute the workload.
- stagingBucket String
- A Cloud Storage bucket used to stage workload dependencies, config files, and store workload output and other ephemeral data, such as Spark history files. If you do not specify a staging bucket, Cloud Dataproc will determine a Cloud Storage location according to the region where your workload is running, and then create and manage project-level, per-location staging and temporary buckets. This field requires a Cloud Storage bucket name, not a gs://... URI to a Cloud Storage bucket.
- subnetworkUri String
- Subnetwork configuration for workload execution.
- ttl String
- The duration after which the workload will be terminated. When the workload exceeds this duration, it will be unconditionally terminated without waiting for ongoing work to finish. If ttl is not specified for a session workload, the workload will be allowed to run until it exits naturally (or run forever without exiting). If ttl is not specified for an interactive session, it defaults to 24 hours. If ttl is not specified for a batch that uses 2.1+ runtime version, it defaults to 4 hours. Minimum value is 10 minutes; maximum value is 14 days. If both ttl and idleTtl are specified (for an interactive session), the conditions are treated as OR conditions: the workload will be terminated when it has been idle for idleTtl or when ttl has been exceeded, whichever occurs first.
- kmsKey string
- The Cloud KMS key to use for encryption.
- networkTags string[]
- Tags used for network traffic control.
- serviceAccount string
- Service account used to execute the workload.
- stagingBucket string
- A Cloud Storage bucket used to stage workload dependencies, config files, and store workload output and other ephemeral data, such as Spark history files. If you do not specify a staging bucket, Cloud Dataproc will determine a Cloud Storage location according to the region where your workload is running, and then create and manage project-level, per-location staging and temporary buckets. This field requires a Cloud Storage bucket name, not a gs://... URI to a Cloud Storage bucket.
- subnetworkUri string
- Subnetwork configuration for workload execution.
- ttl string
- The duration after which the workload will be terminated. When the workload exceeds this duration, it will be unconditionally terminated without waiting for ongoing work to finish. If ttl is not specified for a session workload, the workload will be allowed to run until it exits naturally (or run forever without exiting). If ttl is not specified for an interactive session, it defaults to 24 hours. If ttl is not specified for a batch that uses 2.1+ runtime version, it defaults to 4 hours. Minimum value is 10 minutes; maximum value is 14 days. If both ttl and idleTtl are specified (for an interactive session), the conditions are treated as OR conditions: the workload will be terminated when it has been idle for idleTtl or when ttl has been exceeded, whichever occurs first.
- kms_key str
- The Cloud KMS key to use for encryption.
- network_tags Sequence[str]
- Tags used for network traffic control.
- service_account str
- Service account used to execute the workload.
- staging_bucket str
- A Cloud Storage bucket used to stage workload dependencies, config files, and store workload output and other ephemeral data, such as Spark history files. If you do not specify a staging bucket, Cloud Dataproc will determine a Cloud Storage location according to the region where your workload is running, and then create and manage project-level, per-location staging and temporary buckets. This field requires a Cloud Storage bucket name, not a gs://... URI to a Cloud Storage bucket.
- subnetwork_uri str
- Subnetwork configuration for workload execution.
- ttl str
- The duration after which the workload will be terminated. When the workload exceeds this duration, it will be unconditionally terminated without waiting for ongoing work to finish. If ttl is not specified for a session workload, the workload will be allowed to run until it exits naturally (or run forever without exiting). If ttl is not specified for an interactive session, it defaults to 24 hours. If ttl is not specified for a batch that uses 2.1+ runtime version, it defaults to 4 hours. Minimum value is 10 minutes; maximum value is 14 days. If both ttl and idleTtl are specified (for an interactive session), the conditions are treated as OR conditions: the workload will be terminated when it has been idle for idleTtl or when ttl has been exceeded, whichever occurs first.
- kmsKey String
- The Cloud KMS key to use for encryption.
- networkTags List<String>
- Tags used for network traffic control.
- serviceAccount String
- Service account used to execute the workload.
- stagingBucket String
- A Cloud Storage bucket used to stage workload dependencies, config files, and store workload output and other ephemeral data, such as Spark history files. If you do not specify a staging bucket, Cloud Dataproc will determine a Cloud Storage location according to the region where your workload is running, and then create and manage project-level, per-location staging and temporary buckets. This field requires a Cloud Storage bucket name, not a gs://... URI to a Cloud Storage bucket.
- subnetworkUri String
- Subnetwork configuration for workload execution.
- ttl String
- The duration after which the workload will be terminated. When the workload exceeds this duration, it will be unconditionally terminated without waiting for ongoing work to finish. If ttl is not specified for a session workload, the workload will be allowed to run until it exits naturally (or run forever without exiting). If ttl is not specified for an interactive session, it defaults to 24 hours. If ttl is not specified for a batch that uses 2.1+ runtime version, it defaults to 4 hours. Minimum value is 10 minutes; maximum value is 14 days. If both ttl and idleTtl are specified (for an interactive session), the conditions are treated as OR conditions: the workload will be terminated when it has been idle for idleTtl or when ttl has been exceeded, whichever occurs first.
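As a point of reference, here is a hedged TypeScript sketch of an execution config nested under environmentConfig; every resource name, bucket, and value below is a placeholder assumption, not a default:
import * as gcp from "@pulumi/gcp";
const templateWithExecutionConfig = new gcp.dataproc.SessionTemplate("template_with_execution_config", {
    name: "projects/my-project-name/locations/us-central1/sessionTemplates/exec-config-template",
    location: "us-central1",
    jupyterSession: {
        kernel: "PYTHON",
        displayName: "example kernel",
    },
    environmentConfig: {
        executionConfig: {
            // Assumed service account and staging bucket; substitute your own.
            serviceAccount: "dataproc-session@my-project-name.iam.gserviceaccount.com",
            stagingBucket: "my-dataproc-staging-bucket", // bucket name, not a gs:// URI
            subnetworkUri: "default",
            ttl: "7200s", // unconditionally terminate the session after 2 hours
        },
    },
});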
SessionTemplateEnvironmentConfigPeripheralsConfig, SessionTemplateEnvironmentConfigPeripheralsConfigArgs
- MetastoreService string
- Resource name of an existing Dataproc Metastore service.
- SparkHistoryServerConfig SessionTemplateEnvironmentConfigPeripheralsConfigSparkHistoryServerConfig
- The Spark History Server configuration for the workload. Structure is documented below.
- MetastoreService string
- Resource name of an existing Dataproc Metastore service.
- SparkHistoryServerConfig SessionTemplateEnvironmentConfigPeripheralsConfigSparkHistoryServerConfig
- The Spark History Server configuration for the workload. Structure is documented below.
- metastoreService String
- Resource name of an existing Dataproc Metastore service.
- sparkHistoryServerConfig SessionTemplateEnvironmentConfigPeripheralsConfigSparkHistoryServerConfig
- The Spark History Server configuration for the workload. Structure is documented below.
- metastoreService string
- Resource name of an existing Dataproc Metastore service.
- sparkHistoryServerConfig SessionTemplateEnvironmentConfigPeripheralsConfigSparkHistoryServerConfig
- The Spark History Server configuration for the workload. Structure is documented below.
- metastore_service str
- Resource name of an existing Dataproc Metastore service.
- spark_history_server_config SessionTemplateEnvironmentConfigPeripheralsConfigSparkHistoryServerConfig
- The Spark History Server configuration for the workload. Structure is documented below.
- metastoreService String
- Resource name of an existing Dataproc Metastore service.
- sparkHistoryServerConfig Property Map
- The Spark History Server configuration for the workload. Structure is documented below.
SessionTemplateEnvironmentConfigPeripheralsConfigSparkHistoryServerConfig, SessionTemplateEnvironmentConfigPeripheralsConfigSparkHistoryServerConfigArgs
- DataprocCluster string
- Resource name of an existing Dataproc Cluster to act as a Spark History Server for the workload.
- DataprocCluster string
- Resource name of an existing Dataproc Cluster to act as a Spark History Server for the workload.
- dataprocCluster String
- Resource name of an existing Dataproc Cluster to act as a Spark History Server for the workload.
- dataprocCluster string
- Resource name of an existing Dataproc Cluster to act as a Spark History Server for the workload.
- dataproc_cluster str
- Resource name of an existing Dataproc Cluster to act as a Spark History Server for the workload.
- dataprocCluster String
- Resource name of an existing Dataproc Cluster to act as a Spark History Server for the workload.
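The following TypeScript sketch shows how these peripherals settings could fit together; the Metastore service and history-server cluster names are assumptions for illustration:
import * as gcp from "@pulumi/gcp";
const templateWithPeripherals = new gcp.dataproc.SessionTemplate("template_with_peripherals", {
    name: "projects/my-project-name/locations/us-central1/sessionTemplates/peripherals-template",
    location: "us-central1",
    jupyterSession: {
        kernel: "PYTHON",
        displayName: "example kernel",
    },
    environmentConfig: {
        peripheralsConfig: {
            // Assumed Dataproc Metastore service resource name.
            metastoreService: "projects/my-project-name/locations/us-central1/services/my-metastore",
            sparkHistoryServerConfig: {
                // Assumed Dataproc cluster acting as a Spark History Server.
                dataprocCluster: "projects/my-project-name/regions/us-central1/clusters/my-history-server",
            },
        },
    },
});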
SessionTemplateJupyterSession, SessionTemplateJupyterSessionArgs
- DisplayName string
- Display name, shown in the Jupyter kernelspec card.
- Kernel string
- Kernel to be used with Jupyter interactive session. Possible values are: PYTHON, SCALA.
- DisplayName string
- Display name, shown in the Jupyter kernelspec card.
- Kernel string
- Kernel to be used with Jupyter interactive session. Possible values are: PYTHON, SCALA.
- displayName String
- Display name, shown in the Jupyter kernelspec card.
- kernel String
- Kernel to be used with Jupyter interactive session. Possible values are: PYTHON, SCALA.
- displayName string
- Display name, shown in the Jupyter kernelspec card.
- kernel string
- Kernel to be used with Jupyter interactive session. Possible values are: PYTHON, SCALA.
- display_name str
- Display name, shown in the Jupyter kernelspec card.
- kernel str
- Kernel to be used with Jupyter interactive session. Possible values are: PYTHON, SCALA.
- displayName String
- Display name, shown in the Jupyter kernelspec card.
- kernel String
- Kernel to be used with Jupyter interactive session. Possible values are: PYTHON, SCALA.
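For instance, a Scala kernel could be requested as in this TypeScript sketch (the template name and display name are assumed values):
import * as gcp from "@pulumi/gcp";
const scalaTemplate = new gcp.dataproc.SessionTemplate("scala_session_template", {
    name: "projects/my-project-name/locations/us-central1/sessionTemplates/scala-session-template",
    location: "us-central1",
    jupyterSession: {
        kernel: "SCALA", // one of PYTHON or SCALA
        displayName: "example scala kernel",
    },
});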
SessionTemplateRuntimeConfig, SessionTemplateRuntimeConfigArgs
- ContainerImage string
- Optional custom container image for the job runtime environment. If not specified, a default container image will be used.
- EffectiveProperties Dictionary<string, string>
- (Output) A mapping of property names to values, which are used to configure workload execution.
- Properties Dictionary<string, string>
- A mapping of property names to values, which are used to configure workload execution.
- Version string
- Version of the session runtime.
- ContainerImage string
- Optional custom container image for the job runtime environment. If not specified, a default container image will be used.
- EffectiveProperties map[string]string
- (Output) A mapping of property names to values, which are used to configure workload execution.
- Properties map[string]string
- A mapping of property names to values, which are used to configure workload execution.
- Version string
- Version of the session runtime.
- containerImage String
- Optional custom container image for the job runtime environment. If not specified, a default container image will be used.
- effectiveProperties Map<String,String>
- (Output) A mapping of property names to values, which are used to configure workload execution.
- properties Map<String,String>
- A mapping of property names to values, which are used to configure workload execution.
- version String
- Version of the session runtime.
- containerImage string
- Optional custom container image for the job runtime environment. If not specified, a default container image will be used.
- effectiveProperties {[key: string]: string}
- (Output) A mapping of property names to values, which are used to configure workload execution.
- properties {[key: string]: string}
- A mapping of property names to values, which are used to configure workload execution.
- version string
- Version of the session runtime.
- container_image str
- Optional custom container image for the job runtime environment. If not specified, a default container image will be used.
- effective_properties Mapping[str, str]
- (Output) A mapping of property names to values, which are used to configure workload execution.
- properties Mapping[str, str]
- A mapping of property names to values, which are used to configure workload execution.
- version str
- Version of the session runtime.
- containerImage String
- Optional custom container image for the job runtime environment. If not specified, a default container image will be used.
- effectiveProperties Map<String>
- (Output) A mapping of property names to values, which are used to configure workload execution.
- properties Map<String>
- A mapping of property names to values, which are used to configure workload execution.
- version String
- Version of the session runtime.
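To illustrate, here is a hedged TypeScript sketch of a runtime config; the runtime version, container image, and Spark property values are assumptions, not recommendations:
import * as gcp from "@pulumi/gcp";
const templateWithRuntimeConfig = new gcp.dataproc.SessionTemplate("template_with_runtime_config", {
    name: "projects/my-project-name/locations/us-central1/sessionTemplates/runtime-config-template",
    location: "us-central1",
    jupyterSession: {
        kernel: "PYTHON",
        displayName: "example kernel",
    },
    runtimeConfig: {
        version: "2.2", // assumed Dataproc Serverless runtime version
        // Assumed custom image in Artifact Registry; omit to use the default image.
        containerImage: "us-central1-docker.pkg.dev/my-project-name/my-repo/session-image:latest",
        properties: {
            "spark.executor.instances": "2",
        },
    },
});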
Import
SessionTemplate can be imported using any of these accepted formats:
{{name}}
When using the pulumi import command, SessionTemplate can be imported using one of the formats above. For example:
$ pulumi import gcp:dataproc/sessionTemplate:SessionTemplate default {{name}}
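With an assumed fully qualified template name substituted for {{name}}, the command would look like:
$ pulumi import gcp:dataproc/sessionTemplate:SessionTemplate default projects/my-project-name/locations/us-central1/sessionTemplates/jupyter-session-template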
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- Google Cloud (GCP) Classic pulumi/pulumi-gcp
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the google-beta Terraform Provider.