gcp.dataproc.SessionTemplate

Google Cloud v8.36.0 published on Friday, Jun 27, 2025 by Pulumi

    A Dataproc Serverless session template defines the configuration settings for creating one or more Dataproc Serverless interactive sessions.

    To get more information about SessionTemplate, see the Dataproc Serverless documentation.

    Example Usage

    Dataproc Session Templates Jupyter

    import * as pulumi from "@pulumi/pulumi";
    import * as gcp from "@pulumi/gcp";
    
    const exampleSessionTemplatesJupyter = new gcp.dataproc.SessionTemplate("example_session_templates_jupyter", {
        name: "projects/my-project-name/locations/us-central1/sessionTemplates/jupyter-session-template",
        location: "us-central1",
        labels: {
            session_template_test: "terraform",
        },
        runtimeConfig: {
            properties: {
                "spark.dynamicAllocation.enabled": "false",
                "spark.executor.instances": "2",
            },
        },
        environmentConfig: {
            executionConfig: {
                subnetworkUri: "default",
                ttl: "3600s",
                networkTags: ["tag1"],
            },
        },
        jupyterSession: {
            kernel: "PYTHON",
            displayName: "tf python kernel",
        },
    });
    
    import pulumi
    import pulumi_gcp as gcp
    
    example_session_templates_jupyter = gcp.dataproc.SessionTemplate("example_session_templates_jupyter",
        name="projects/my-project-name/locations/us-central1/sessionTemplates/jupyter-session-template",
        location="us-central1",
        labels={
            "session_template_test": "terraform",
        },
        runtime_config={
            "properties": {
                "spark.dynamicAllocation.enabled": "false",
                "spark.executor.instances": "2",
            },
        },
        environment_config={
            "execution_config": {
                "subnetwork_uri": "default",
                "ttl": "3600s",
                "network_tags": ["tag1"],
            },
        },
        jupyter_session={
            "kernel": "PYTHON",
            "display_name": "tf python kernel",
        })
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/dataproc"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := dataproc.NewSessionTemplate(ctx, "example_session_templates_jupyter", &dataproc.SessionTemplateArgs{
    			Name:     pulumi.String("projects/my-project-name/locations/us-central1/sessionTemplates/jupyter-session-template"),
    			Location: pulumi.String("us-central1"),
    			Labels: pulumi.StringMap{
    				"session_template_test": pulumi.String("terraform"),
    			},
    			RuntimeConfig: &dataproc.SessionTemplateRuntimeConfigArgs{
    				Properties: pulumi.StringMap{
    					"spark.dynamicAllocation.enabled": pulumi.String("false"),
    					"spark.executor.instances":        pulumi.String("2"),
    				},
    			},
    			EnvironmentConfig: &dataproc.SessionTemplateEnvironmentConfigArgs{
    				ExecutionConfig: &dataproc.SessionTemplateEnvironmentConfigExecutionConfigArgs{
    					SubnetworkUri: pulumi.String("default"),
    					Ttl:           pulumi.String("3600s"),
    					NetworkTags: pulumi.StringArray{
    						pulumi.String("tag1"),
    					},
    				},
    			},
    			JupyterSession: &dataproc.SessionTemplateJupyterSessionArgs{
    				Kernel:      pulumi.String("PYTHON"),
    				DisplayName: pulumi.String("tf python kernel"),
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Gcp = Pulumi.Gcp;
    
    return await Deployment.RunAsync(() => 
    {
        var exampleSessionTemplatesJupyter = new Gcp.Dataproc.SessionTemplate("example_session_templates_jupyter", new()
        {
            Name = "projects/my-project-name/locations/us-central1/sessionTemplates/jupyter-session-template",
            Location = "us-central1",
            Labels = 
            {
                { "session_template_test", "terraform" },
            },
            RuntimeConfig = new Gcp.Dataproc.Inputs.SessionTemplateRuntimeConfigArgs
            {
                Properties = 
                {
                    { "spark.dynamicAllocation.enabled", "false" },
                    { "spark.executor.instances", "2" },
                },
            },
            EnvironmentConfig = new Gcp.Dataproc.Inputs.SessionTemplateEnvironmentConfigArgs
            {
                ExecutionConfig = new Gcp.Dataproc.Inputs.SessionTemplateEnvironmentConfigExecutionConfigArgs
                {
                    SubnetworkUri = "default",
                    Ttl = "3600s",
                    NetworkTags = new[]
                    {
                        "tag1",
                    },
                },
            },
            JupyterSession = new Gcp.Dataproc.Inputs.SessionTemplateJupyterSessionArgs
            {
                Kernel = "PYTHON",
                DisplayName = "tf python kernel",
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.gcp.dataproc.SessionTemplate;
    import com.pulumi.gcp.dataproc.SessionTemplateArgs;
    import com.pulumi.gcp.dataproc.inputs.SessionTemplateRuntimeConfigArgs;
    import com.pulumi.gcp.dataproc.inputs.SessionTemplateEnvironmentConfigArgs;
    import com.pulumi.gcp.dataproc.inputs.SessionTemplateEnvironmentConfigExecutionConfigArgs;
    import com.pulumi.gcp.dataproc.inputs.SessionTemplateJupyterSessionArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var exampleSessionTemplatesJupyter = new SessionTemplate("exampleSessionTemplatesJupyter", SessionTemplateArgs.builder()
                .name("projects/my-project-name/locations/us-central1/sessionTemplates/jupyter-session-template")
                .location("us-central1")
                .labels(Map.of("session_template_test", "terraform"))
                .runtimeConfig(SessionTemplateRuntimeConfigArgs.builder()
                    .properties(Map.ofEntries(
                        Map.entry("spark.dynamicAllocation.enabled", "false"),
                        Map.entry("spark.executor.instances", "2")
                    ))
                    .build())
                .environmentConfig(SessionTemplateEnvironmentConfigArgs.builder()
                    .executionConfig(SessionTemplateEnvironmentConfigExecutionConfigArgs.builder()
                        .subnetworkUri("default")
                        .ttl("3600s")
                        .networkTags("tag1")
                        .build())
                    .build())
                .jupyterSession(SessionTemplateJupyterSessionArgs.builder()
                    .kernel("PYTHON")
                    .displayName("tf python kernel")
                    .build())
                .build());
    
        }
    }
    
    resources:
      exampleSessionTemplatesJupyter:
        type: gcp:dataproc:SessionTemplate
        name: example_session_templates_jupyter
        properties:
          name: projects/my-project-name/locations/us-central1/sessionTemplates/jupyter-session-template
          location: us-central1
          labels:
            session_template_test: terraform
          runtimeConfig:
            properties:
              spark.dynamicAllocation.enabled: 'false'
              spark.executor.instances: '2'
          environmentConfig:
            executionConfig:
              subnetworkUri: default
              ttl: 3600s
              networkTags:
                - tag1
          jupyterSession:
            kernel: PYTHON
            displayName: tf python kernel
    

    Dataproc Session Templates Jupyter Full

    import * as pulumi from "@pulumi/pulumi";
    import * as gcp from "@pulumi/gcp";
    
    const project = gcp.organizations.getProject({});
    const gcsAccount = gcp.storage.getProjectServiceAccount({});
    const bucket = new gcp.storage.Bucket("bucket", {
        uniformBucketLevelAccess: true,
        name: "dataproc-bucket",
        location: "US",
        forceDestroy: true,
    });
    const cryptoKeyMember1 = new gcp.kms.CryptoKeyIAMMember("crypto_key_member_1", {
        cryptoKeyId: "example-key",
        role: "roles/cloudkms.cryptoKeyEncrypterDecrypter",
        member: project.then(project => `serviceAccount:service-${project.number}@dataproc-accounts.iam.gserviceaccount.com`),
    });
    const ms = new gcp.dataproc.MetastoreService("ms", {
        serviceId: "jupyter-session-template",
        location: "us-central1",
        port: 9080,
        tier: "DEVELOPER",
        maintenanceWindow: {
            hourOfDay: 2,
            dayOfWeek: "SUNDAY",
        },
        hiveMetastoreConfig: {
            version: "3.1.2",
        },
        networkConfig: {
            consumers: [{
                subnetwork: "projects/my-project-name/regions/us-central1/subnetworks/default",
            }],
        },
    });
    const basic = new gcp.dataproc.Cluster("basic", {
        name: "jupyter-session-template",
        region: "us-central1",
        clusterConfig: {
            softwareConfig: {
                overrideProperties: {
                    "dataproc:dataproc.allow.zero.workers": "true",
                    "spark:spark.history.fs.logDirectory": pulumi.interpolate`gs://${bucket.name}/*/spark-job-history`,
                },
            },
            gceClusterConfig: {
                subnetwork: "default",
            },
            endpointConfig: {
                enableHttpPortAccess: true,
            },
            masterConfig: {
                numInstances: 1,
                machineType: "e2-standard-2",
                diskConfig: {
                    bootDiskSizeGb: 35,
                },
            },
            metastoreConfig: {
                dataprocMetastoreService: ms.name,
            },
        },
    });
    const dataprocSessionTemplatesJupyterFull = new gcp.dataproc.SessionTemplate("dataproc_session_templates_jupyter_full", {
        name: "projects/my-project-name/locations/us-central1/sessionTemplates/jupyter-session-template",
        location: "us-central1",
        labels: {
            session_template_test: "terraform",
        },
        runtimeConfig: {
            properties: {
                "spark.dynamicAllocation.enabled": "false",
                "spark.executor.instances": "2",
            },
            version: "2.2",
            containerImage: "us-docker.pkg.dev/my-project-name/s8s-spark-test-images/s8s-spark:latest",
        },
        environmentConfig: {
            executionConfig: {
                ttl: "3600s",
                networkTags: ["tag1"],
                kmsKey: "example-key",
                subnetworkUri: "default",
                serviceAccount: project.then(project => `${project.number}-compute@developer.gserviceaccount.com`),
                stagingBucket: bucket.name,
            },
            peripheralsConfig: {
                metastoreService: ms.name,
                sparkHistoryServerConfig: {
                    dataprocCluster: basic.id,
                },
            },
        },
        jupyterSession: {
            kernel: "PYTHON",
            displayName: "tf python kernel",
        },
    }, {
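        // Create the template only after the KMS grant to the Dataproc service agent is in place.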
        dependsOn: [cryptoKeyMember1],
    });
    
    import pulumi
    import pulumi_gcp as gcp
    
    project = gcp.organizations.get_project()
    gcs_account = gcp.storage.get_project_service_account()
    bucket = gcp.storage.Bucket("bucket",
        uniform_bucket_level_access=True,
        name="dataproc-bucket",
        location="US",
        force_destroy=True)
    crypto_key_member1 = gcp.kms.CryptoKeyIAMMember("crypto_key_member_1",
        crypto_key_id="example-key",
        role="roles/cloudkms.cryptoKeyEncrypterDecrypter",
        member=f"serviceAccount:service-{project.number}@dataproc-accounts.iam.gserviceaccount.com")
    ms = gcp.dataproc.MetastoreService("ms",
        service_id="jupyter-session-template",
        location="us-central1",
        port=9080,
        tier="DEVELOPER",
        maintenance_window={
            "hour_of_day": 2,
            "day_of_week": "SUNDAY",
        },
        hive_metastore_config={
            "version": "3.1.2",
        },
        network_config={
            "consumers": [{
                "subnetwork": "projects/my-project-name/regions/us-central1/subnetworks/default",
            }],
        })
    basic = gcp.dataproc.Cluster("basic",
        name="jupyter-session-template",
        region="us-central1",
        cluster_config={
            "software_config": {
                "override_properties": {
                    "dataproc:dataproc.allow.zero.workers": "true",
                    "spark:spark.history.fs.logDirectory": bucket.name.apply(lambda name: f"gs://{name}/*/spark-job-history"),
                },
            },
            "gce_cluster_config": {
                "subnetwork": "default",
            },
            "endpoint_config": {
                "enable_http_port_access": True,
            },
            "master_config": {
                "num_instances": 1,
                "machine_type": "e2-standard-2",
                "disk_config": {
                    "boot_disk_size_gb": 35,
                },
            },
            "metastore_config": {
                "dataproc_metastore_service": ms.name,
            },
        })
    dataproc_session_templates_jupyter_full = gcp.dataproc.SessionTemplate("dataproc_session_templates_jupyter_full",
        name="projects/my-project-name/locations/us-central1/sessionTemplates/jupyter-session-template",
        location="us-central1",
        labels={
            "session_template_test": "terraform",
        },
        runtime_config={
            "properties": {
                "spark.dynamicAllocation.enabled": "false",
                "spark.executor.instances": "2",
            },
            "version": "2.2",
            "container_image": "us-docker.pkg.dev/my-project-name/s8s-spark-test-images/s8s-spark:latest",
        },
        environment_config={
            "execution_config": {
                "ttl": "3600s",
                "network_tags": ["tag1"],
                "kms_key": "example-key",
                "subnetwork_uri": "default",
                "service_account": f"{project.number}-compute@developer.gserviceaccount.com",
                "staging_bucket": bucket.name,
            },
            "peripherals_config": {
                "metastore_service": ms.name,
                "spark_history_server_config": {
                    "dataproc_cluster": basic.id,
                },
            },
        },
        jupyter_session={
            "kernel": "PYTHON",
            "display_name": "tf python kernel",
        },
        opts = pulumi.ResourceOptions(depends_on=[crypto_key_member1]))
    
    package main
    
    import (
    	"fmt"
    
    	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/dataproc"
    	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/kms"
    	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/organizations"
    	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/storage"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		project, err := organizations.LookupProject(ctx, &organizations.LookupProjectArgs{}, nil)
    		if err != nil {
    			return err
    		}
    		_, err = storage.GetProjectServiceAccount(ctx, &storage.GetProjectServiceAccountArgs{}, nil)
    		if err != nil {
    			return err
    		}
    		bucket, err := storage.NewBucket(ctx, "bucket", &storage.BucketArgs{
    			UniformBucketLevelAccess: pulumi.Bool(true),
    			Name:                     pulumi.String("dataproc-bucket"),
    			Location:                 pulumi.String("US"),
    			ForceDestroy:             pulumi.Bool(true),
    		})
    		if err != nil {
    			return err
    		}
    		cryptoKeyMember1, err := kms.NewCryptoKeyIAMMember(ctx, "crypto_key_member_1", &kms.CryptoKeyIAMMemberArgs{
    			CryptoKeyId: pulumi.String("example-key"),
    			Role:        pulumi.String("roles/cloudkms.cryptoKeyEncrypterDecrypter"),
    			Member:      pulumi.Sprintf("serviceAccount:service-%v@dataproc-accounts.iam.gserviceaccount.com", project.Number),
    		})
    		if err != nil {
    			return err
    		}
    		ms, err := dataproc.NewMetastoreService(ctx, "ms", &dataproc.MetastoreServiceArgs{
    			ServiceId: pulumi.String("jupyter-session-template"),
    			Location:  pulumi.String("us-central1"),
    			Port:      pulumi.Int(9080),
    			Tier:      pulumi.String("DEVELOPER"),
    			MaintenanceWindow: &dataproc.MetastoreServiceMaintenanceWindowArgs{
    				HourOfDay: pulumi.Int(2),
    				DayOfWeek: pulumi.String("SUNDAY"),
    			},
    			HiveMetastoreConfig: &dataproc.MetastoreServiceHiveMetastoreConfigArgs{
    				Version: pulumi.String("3.1.2"),
    			},
    			NetworkConfig: &dataproc.MetastoreServiceNetworkConfigArgs{
    				Consumers: dataproc.MetastoreServiceNetworkConfigConsumerArray{
    					&dataproc.MetastoreServiceNetworkConfigConsumerArgs{
    						Subnetwork: pulumi.String("projects/my-project-name/regions/us-central1/subnetworks/default"),
    					},
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		basic, err := dataproc.NewCluster(ctx, "basic", &dataproc.ClusterArgs{
    			Name:   pulumi.String("jupyter-session-template"),
    			Region: pulumi.String("us-central1"),
    			ClusterConfig: &dataproc.ClusterClusterConfigArgs{
    				SoftwareConfig: &dataproc.ClusterClusterConfigSoftwareConfigArgs{
    					OverrideProperties: pulumi.StringMap{
    						"dataproc:dataproc.allow.zero.workers": pulumi.String("true"),
    						"spark:spark.history.fs.logDirectory": bucket.Name.ApplyT(func(name string) (string, error) {
    							return fmt.Sprintf("gs://%v/*/spark-job-history", name), nil
    						}).(pulumi.StringOutput),
    					},
    				},
    				GceClusterConfig: &dataproc.ClusterClusterConfigGceClusterConfigArgs{
    					Subnetwork: pulumi.String("default"),
    				},
    				EndpointConfig: &dataproc.ClusterClusterConfigEndpointConfigArgs{
    					EnableHttpPortAccess: pulumi.Bool(true),
    				},
    				MasterConfig: &dataproc.ClusterClusterConfigMasterConfigArgs{
    					NumInstances: pulumi.Int(1),
    					MachineType:  pulumi.String("e2-standard-2"),
    					DiskConfig: &dataproc.ClusterClusterConfigMasterConfigDiskConfigArgs{
    						BootDiskSizeGb: pulumi.Int(35),
    					},
    				},
    				MetastoreConfig: &dataproc.ClusterClusterConfigMetastoreConfigArgs{
    					DataprocMetastoreService: ms.Name,
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		_, err = dataproc.NewSessionTemplate(ctx, "dataproc_session_templates_jupyter_full", &dataproc.SessionTemplateArgs{
    			Name:     pulumi.String("projects/my-project-name/locations/us-central1/sessionTemplates/jupyter-session-template"),
    			Location: pulumi.String("us-central1"),
    			Labels: pulumi.StringMap{
    				"session_template_test": pulumi.String("terraform"),
    			},
    			RuntimeConfig: &dataproc.SessionTemplateRuntimeConfigArgs{
    				Properties: pulumi.StringMap{
    					"spark.dynamicAllocation.enabled": pulumi.String("false"),
    					"spark.executor.instances":        pulumi.String("2"),
    				},
    				Version:        pulumi.String("2.2"),
    				ContainerImage: pulumi.String("us-docker.pkg.dev/my-project-name/s8s-spark-test-images/s8s-spark:latest"),
    			},
    			EnvironmentConfig: &dataproc.SessionTemplateEnvironmentConfigArgs{
    				ExecutionConfig: &dataproc.SessionTemplateEnvironmentConfigExecutionConfigArgs{
    					Ttl: pulumi.String("3600s"),
    					NetworkTags: pulumi.StringArray{
    						pulumi.String("tag1"),
    					},
    					KmsKey:         pulumi.String("example-key"),
    					SubnetworkUri:  pulumi.String("default"),
    					ServiceAccount: pulumi.Sprintf("%v-compute@developer.gserviceaccount.com", project.Number),
    					StagingBucket:  bucket.Name,
    				},
    				PeripheralsConfig: &dataproc.SessionTemplateEnvironmentConfigPeripheralsConfigArgs{
    					MetastoreService: ms.Name,
    					SparkHistoryServerConfig: &dataproc.SessionTemplateEnvironmentConfigPeripheralsConfigSparkHistoryServerConfigArgs{
    						DataprocCluster: basic.ID(),
    					},
    				},
    			},
    			JupyterSession: &dataproc.SessionTemplateJupyterSessionArgs{
    				Kernel:      pulumi.String("PYTHON"),
    				DisplayName: pulumi.String("tf python kernel"),
    			},
    		}, pulumi.DependsOn([]pulumi.Resource{
    			cryptoKeyMember1,
    		}))
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Gcp = Pulumi.Gcp;
    
    return await Deployment.RunAsync(() => 
    {
        var project = Gcp.Organizations.GetProject.Invoke();
    
        var gcsAccount = Gcp.Storage.GetProjectServiceAccount.Invoke();
    
        var bucket = new Gcp.Storage.Bucket("bucket", new()
        {
            UniformBucketLevelAccess = true,
            Name = "dataproc-bucket",
            Location = "US",
            ForceDestroy = true,
        });
    
        var cryptoKeyMember1 = new Gcp.Kms.CryptoKeyIAMMember("crypto_key_member_1", new()
        {
            CryptoKeyId = "example-key",
            Role = "roles/cloudkms.cryptoKeyEncrypterDecrypter",
            Member = $"serviceAccount:service-{project.Apply(getProjectResult => getProjectResult.Number)}@dataproc-accounts.iam.gserviceaccount.com",
        });
    
        var ms = new Gcp.Dataproc.MetastoreService("ms", new()
        {
            ServiceId = "jupyter-session-template",
            Location = "us-central1",
            Port = 9080,
            Tier = "DEVELOPER",
            MaintenanceWindow = new Gcp.Dataproc.Inputs.MetastoreServiceMaintenanceWindowArgs
            {
                HourOfDay = 2,
                DayOfWeek = "SUNDAY",
            },
            HiveMetastoreConfig = new Gcp.Dataproc.Inputs.MetastoreServiceHiveMetastoreConfigArgs
            {
                Version = "3.1.2",
            },
            NetworkConfig = new Gcp.Dataproc.Inputs.MetastoreServiceNetworkConfigArgs
            {
                Consumers = new[]
                {
                    new Gcp.Dataproc.Inputs.MetastoreServiceNetworkConfigConsumerArgs
                    {
                        Subnetwork = "projects/my-project-name/regions/us-central1/subnetworks/default",
                    },
                },
            },
        });
    
        var basic = new Gcp.Dataproc.Cluster("basic", new()
        {
            Name = "jupyter-session-template",
            Region = "us-central1",
            ClusterConfig = new Gcp.Dataproc.Inputs.ClusterClusterConfigArgs
            {
                SoftwareConfig = new Gcp.Dataproc.Inputs.ClusterClusterConfigSoftwareConfigArgs
                {
                    OverrideProperties = 
                    {
                        { "dataproc:dataproc.allow.zero.workers", "true" },
                        { "spark:spark.history.fs.logDirectory", bucket.Name.Apply(name => $"gs://{name}/*/spark-job-history") },
                    },
                },
                GceClusterConfig = new Gcp.Dataproc.Inputs.ClusterClusterConfigGceClusterConfigArgs
                {
                    Subnetwork = "default",
                },
                EndpointConfig = new Gcp.Dataproc.Inputs.ClusterClusterConfigEndpointConfigArgs
                {
                    EnableHttpPortAccess = true,
                },
                MasterConfig = new Gcp.Dataproc.Inputs.ClusterClusterConfigMasterConfigArgs
                {
                    NumInstances = 1,
                    MachineType = "e2-standard-2",
                    DiskConfig = new Gcp.Dataproc.Inputs.ClusterClusterConfigMasterConfigDiskConfigArgs
                    {
                        BootDiskSizeGb = 35,
                    },
                },
                MetastoreConfig = new Gcp.Dataproc.Inputs.ClusterClusterConfigMetastoreConfigArgs
                {
                    DataprocMetastoreService = ms.Name,
                },
            },
        });
    
        var dataprocSessionTemplatesJupyterFull = new Gcp.Dataproc.SessionTemplate("dataproc_session_templates_jupyter_full", new()
        {
            Name = "projects/my-project-name/locations/us-central1/sessionTemplates/jupyter-session-template",
            Location = "us-central1",
            Labels = 
            {
                { "session_template_test", "terraform" },
            },
            RuntimeConfig = new Gcp.Dataproc.Inputs.SessionTemplateRuntimeConfigArgs
            {
                Properties = 
                {
                    { "spark.dynamicAllocation.enabled", "false" },
                    { "spark.executor.instances", "2" },
                },
                Version = "2.2",
                ContainerImage = "us-docker.pkg.dev/my-project-name/s8s-spark-test-images/s8s-spark:latest",
            },
            EnvironmentConfig = new Gcp.Dataproc.Inputs.SessionTemplateEnvironmentConfigArgs
            {
                ExecutionConfig = new Gcp.Dataproc.Inputs.SessionTemplateEnvironmentConfigExecutionConfigArgs
                {
                    Ttl = "3600s",
                    NetworkTags = new[]
                    {
                        "tag1",
                    },
                    KmsKey = "example-key",
                    SubnetworkUri = "default",
                    ServiceAccount = $"{project.Apply(getProjectResult => getProjectResult.Number)}-compute@developer.gserviceaccount.com",
                    StagingBucket = bucket.Name,
                },
                PeripheralsConfig = new Gcp.Dataproc.Inputs.SessionTemplateEnvironmentConfigPeripheralsConfigArgs
                {
                    MetastoreService = ms.Name,
                    SparkHistoryServerConfig = new Gcp.Dataproc.Inputs.SessionTemplateEnvironmentConfigPeripheralsConfigSparkHistoryServerConfigArgs
                    {
                        DataprocCluster = basic.Id,
                    },
                },
            },
            JupyterSession = new Gcp.Dataproc.Inputs.SessionTemplateJupyterSessionArgs
            {
                Kernel = "PYTHON",
                DisplayName = "tf python kernel",
            },
        }, new CustomResourceOptions
        {
            DependsOn =
            {
                cryptoKeyMember1,
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.gcp.organizations.OrganizationsFunctions;
    import com.pulumi.gcp.organizations.inputs.GetProjectArgs;
    import com.pulumi.gcp.storage.StorageFunctions;
    import com.pulumi.gcp.storage.inputs.GetProjectServiceAccountArgs;
    import com.pulumi.gcp.storage.Bucket;
    import com.pulumi.gcp.storage.BucketArgs;
    import com.pulumi.gcp.kms.CryptoKeyIAMMember;
    import com.pulumi.gcp.kms.CryptoKeyIAMMemberArgs;
    import com.pulumi.gcp.dataproc.MetastoreService;
    import com.pulumi.gcp.dataproc.MetastoreServiceArgs;
    import com.pulumi.gcp.dataproc.inputs.MetastoreServiceMaintenanceWindowArgs;
    import com.pulumi.gcp.dataproc.inputs.MetastoreServiceHiveMetastoreConfigArgs;
    import com.pulumi.gcp.dataproc.inputs.MetastoreServiceNetworkConfigArgs;
    import com.pulumi.gcp.dataproc.inputs.MetastoreServiceNetworkConfigConsumerArgs;
    import com.pulumi.gcp.dataproc.Cluster;
    import com.pulumi.gcp.dataproc.ClusterArgs;
    import com.pulumi.gcp.dataproc.inputs.ClusterClusterConfigArgs;
    import com.pulumi.gcp.dataproc.inputs.ClusterClusterConfigSoftwareConfigArgs;
    import com.pulumi.gcp.dataproc.inputs.ClusterClusterConfigGceClusterConfigArgs;
    import com.pulumi.gcp.dataproc.inputs.ClusterClusterConfigEndpointConfigArgs;
    import com.pulumi.gcp.dataproc.inputs.ClusterClusterConfigMasterConfigArgs;
    import com.pulumi.gcp.dataproc.inputs.ClusterClusterConfigMasterConfigDiskConfigArgs;
    import com.pulumi.gcp.dataproc.inputs.ClusterClusterConfigMetastoreConfigArgs;
    import com.pulumi.gcp.dataproc.SessionTemplate;
    import com.pulumi.gcp.dataproc.SessionTemplateArgs;
    import com.pulumi.gcp.dataproc.inputs.SessionTemplateRuntimeConfigArgs;
    import com.pulumi.gcp.dataproc.inputs.SessionTemplateEnvironmentConfigArgs;
    import com.pulumi.gcp.dataproc.inputs.SessionTemplateEnvironmentConfigExecutionConfigArgs;
    import com.pulumi.gcp.dataproc.inputs.SessionTemplateEnvironmentConfigPeripheralsConfigArgs;
    import com.pulumi.gcp.dataproc.inputs.SessionTemplateEnvironmentConfigPeripheralsConfigSparkHistoryServerConfigArgs;
    import com.pulumi.gcp.dataproc.inputs.SessionTemplateJupyterSessionArgs;
    import com.pulumi.resources.CustomResourceOptions;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            final var project = OrganizationsFunctions.getProject(GetProjectArgs.builder()
                .build());
    
            final var gcsAccount = StorageFunctions.getProjectServiceAccount(GetProjectServiceAccountArgs.builder()
                .build());
    
            var bucket = new Bucket("bucket", BucketArgs.builder()
                .uniformBucketLevelAccess(true)
                .name("dataproc-bucket")
                .location("US")
                .forceDestroy(true)
                .build());
    
            var cryptoKeyMember1 = new CryptoKeyIAMMember("cryptoKeyMember1", CryptoKeyIAMMemberArgs.builder()
                .cryptoKeyId("example-key")
                .role("roles/cloudkms.cryptoKeyEncrypterDecrypter")
                .member(project.applyValue(getProjectResult -> String.format("serviceAccount:service-%s@dataproc-accounts.iam.gserviceaccount.com", getProjectResult.number())))
                .build());
    
            var ms = new MetastoreService("ms", MetastoreServiceArgs.builder()
                .serviceId("jupyter-session-template")
                .location("us-central1")
                .port(9080)
                .tier("DEVELOPER")
                .maintenanceWindow(MetastoreServiceMaintenanceWindowArgs.builder()
                    .hourOfDay(2)
                    .dayOfWeek("SUNDAY")
                    .build())
                .hiveMetastoreConfig(MetastoreServiceHiveMetastoreConfigArgs.builder()
                    .version("3.1.2")
                    .build())
                .networkConfig(MetastoreServiceNetworkConfigArgs.builder()
                    .consumers(MetastoreServiceNetworkConfigConsumerArgs.builder()
                        .subnetwork("projects/my-project-name/regions/us-central1/subnetworks/default")
                        .build())
                    .build())
                .build());
    
            var basic = new Cluster("basic", ClusterArgs.builder()
                .name("jupyter-session-template")
                .region("us-central1")
                .clusterConfig(ClusterClusterConfigArgs.builder()
                    .softwareConfig(ClusterClusterConfigSoftwareConfigArgs.builder()
                        .overrideProperties(Map.ofEntries(
                            Map.entry("dataproc:dataproc.allow.zero.workers", "true"),
                            Map.entry("spark:spark.history.fs.logDirectory", bucket.name().applyValue(_name -> String.format("gs://%s/*/spark-job-history", _name)))
                        ))
                        .build())
                    .gceClusterConfig(ClusterClusterConfigGceClusterConfigArgs.builder()
                        .subnetwork("default")
                        .build())
                    .endpointConfig(ClusterClusterConfigEndpointConfigArgs.builder()
                        .enableHttpPortAccess(true)
                        .build())
                    .masterConfig(ClusterClusterConfigMasterConfigArgs.builder()
                        .numInstances(1)
                        .machineType("e2-standard-2")
                        .diskConfig(ClusterClusterConfigMasterConfigDiskConfigArgs.builder()
                            .bootDiskSizeGb(35)
                            .build())
                        .build())
                    .metastoreConfig(ClusterClusterConfigMetastoreConfigArgs.builder()
                        .dataprocMetastoreService(ms.name())
                        .build())
                    .build())
                .build());
    
            var dataprocSessionTemplatesJupyterFull = new SessionTemplate("dataprocSessionTemplatesJupyterFull", SessionTemplateArgs.builder()
                .name("projects/my-project-name/locations/us-central1/sessionTemplates/jupyter-session-template")
                .location("us-central1")
                .labels(Map.of("session_template_test", "terraform"))
                .runtimeConfig(SessionTemplateRuntimeConfigArgs.builder()
                    .properties(Map.ofEntries(
                        Map.entry("spark.dynamicAllocation.enabled", "false"),
                        Map.entry("spark.executor.instances", "2")
                    ))
                    .version("2.2")
                    .containerImage("us-docker.pkg.dev/my-project-name/s8s-spark-test-images/s8s-spark:latest")
                    .build())
                .environmentConfig(SessionTemplateEnvironmentConfigArgs.builder()
                    .executionConfig(SessionTemplateEnvironmentConfigExecutionConfigArgs.builder()
                        .ttl("3600s")
                        .networkTags("tag1")
                        .kmsKey("example-key")
                        .subnetworkUri("default")
                    .serviceAccount(project.applyValue(getProjectResult -> String.format("%s-compute@developer.gserviceaccount.com", getProjectResult.number())))
                        .stagingBucket(bucket.name())
                        .build())
                    .peripheralsConfig(SessionTemplateEnvironmentConfigPeripheralsConfigArgs.builder()
                        .metastoreService(ms.name())
                        .sparkHistoryServerConfig(SessionTemplateEnvironmentConfigPeripheralsConfigSparkHistoryServerConfigArgs.builder()
                            .dataprocCluster(basic.id())
                            .build())
                        .build())
                    .build())
                .jupyterSession(SessionTemplateJupyterSessionArgs.builder()
                    .kernel("PYTHON")
                    .displayName("tf python kernel")
                    .build())
                .build(), CustomResourceOptions.builder()
                    .dependsOn(cryptoKeyMember1)
                    .build());
    
        }
    }
    
    resources:
      dataprocSessionTemplatesJupyterFull:
        type: gcp:dataproc:SessionTemplate
        name: dataproc_session_templates_jupyter_full
        properties:
          name: projects/my-project-name/locations/us-central1/sessionTemplates/jupyter-session-template
          location: us-central1
          labels:
            session_template_test: terraform
          runtimeConfig:
            properties:
              spark.dynamicAllocation.enabled: 'false'
              spark.executor.instances: '2'
            version: '2.2'
            containerImage: us-docker.pkg.dev/my-project-name/s8s-spark-test-images/s8s-spark:latest
          environmentConfig:
            executionConfig:
              ttl: 3600s
              networkTags:
                - tag1
              kmsKey: example-key
              subnetworkUri: default
              serviceAccount: ${project.number}-compute@developer.gserviceaccount.com
              stagingBucket: ${bucket.name}
            peripheralsConfig:
              metastoreService: ${ms.name}
              sparkHistoryServerConfig:
                dataprocCluster: ${basic.id}
          jupyterSession:
            kernel: PYTHON
            displayName: tf python kernel
        options:
          dependsOn:
            - ${cryptoKeyMember1}
      bucket:
        type: gcp:storage:Bucket
        properties:
          uniformBucketLevelAccess: true
          name: dataproc-bucket
          location: US
          forceDestroy: true
      cryptoKeyMember1:
        type: gcp:kms:CryptoKeyIAMMember
        name: crypto_key_member_1
        properties:
          cryptoKeyId: example-key
          role: roles/cloudkms.cryptoKeyEncrypterDecrypter
          member: serviceAccount:service-${project.number}@dataproc-accounts.iam.gserviceaccount.com
      basic:
        type: gcp:dataproc:Cluster
        properties:
          name: jupyter-session-template
          region: us-central1
          clusterConfig:
            softwareConfig:
              overrideProperties:
                dataproc:dataproc.allow.zero.workers: 'true'
                spark:spark.history.fs.logDirectory: gs://${bucket.name}/*/spark-job-history
            gceClusterConfig:
              subnetwork: default
            endpointConfig:
              enableHttpPortAccess: true
            masterConfig:
              numInstances: 1
              machineType: e2-standard-2
              diskConfig:
                bootDiskSizeGb: 35
            metastoreConfig:
              dataprocMetastoreService: ${ms.name}
      ms:
        type: gcp:dataproc:MetastoreService
        properties:
          serviceId: jupyter-session-template
          location: us-central1
          port: 9080
          tier: DEVELOPER
          maintenanceWindow:
            hourOfDay: 2
            dayOfWeek: SUNDAY
          hiveMetastoreConfig:
            version: 3.1.2
          networkConfig:
            consumers:
              - subnetwork: projects/my-project-name/regions/us-central1/subnetworks/default
    variables:
      project:
        fn::invoke:
          function: gcp:organizations:getProject
          arguments: {}
      gcsAccount:
        fn::invoke:
          function: gcp:storage:getProjectServiceAccount
          arguments: {}
    

    Dataproc Session Templates Spark Connect

    import * as pulumi from "@pulumi/pulumi";
    import * as gcp from "@pulumi/gcp";
    
    const exampleSessionTemplatesSparkConnect = new gcp.dataproc.SessionTemplate("example_session_templates_spark_connect", {
        name: "projects/my-project-name/locations/us-central1/sessionTemplates/sc-session-template",
        location: "us-central1",
        labels: {
            session_template_test: "terraform",
        },
        runtimeConfig: {
            properties: {
                "spark.dynamicAllocation.enabled": "false",
                "spark.executor.instances": "2",
            },
        },
        environmentConfig: {
            executionConfig: {
                subnetworkUri: "default",
                ttl: "3600s",
                networkTags: ["tag1"],
            },
        },
    });
    
    import pulumi
    import pulumi_gcp as gcp
    
    example_session_templates_spark_connect = gcp.dataproc.SessionTemplate("example_session_templates_spark_connect",
        name="projects/my-project-name/locations/us-central1/sessionTemplates/sc-session-template",
        location="us-central1",
        labels={
            "session_template_test": "terraform",
        },
        runtime_config={
            "properties": {
                "spark.dynamicAllocation.enabled": "false",
                "spark.executor.instances": "2",
            },
        },
        environment_config={
            "execution_config": {
                "subnetwork_uri": "default",
                "ttl": "3600s",
                "network_tags": ["tag1"],
            },
        })
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/dataproc"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := dataproc.NewSessionTemplate(ctx, "example_session_templates_spark_connect", &dataproc.SessionTemplateArgs{
    			Name:     pulumi.String("projects/my-project-name/locations/us-central1/sessionTemplates/sc-session-template"),
    			Location: pulumi.String("us-central1"),
    			Labels: pulumi.StringMap{
    				"session_template_test": pulumi.String("terraform"),
    			},
    			RuntimeConfig: &dataproc.SessionTemplateRuntimeConfigArgs{
    				Properties: pulumi.StringMap{
    					"spark.dynamicAllocation.enabled": pulumi.String("false"),
    					"spark.executor.instances":        pulumi.String("2"),
    				},
    			},
    			EnvironmentConfig: &dataproc.SessionTemplateEnvironmentConfigArgs{
    				ExecutionConfig: &dataproc.SessionTemplateEnvironmentConfigExecutionConfigArgs{
    					SubnetworkUri: pulumi.String("default"),
    					Ttl:           pulumi.String("3600s"),
    					NetworkTags: pulumi.StringArray{
    						pulumi.String("tag1"),
    					},
    				},
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Gcp = Pulumi.Gcp;
    
    return await Deployment.RunAsync(() => 
    {
        var exampleSessionTemplatesSparkConnect = new Gcp.Dataproc.SessionTemplate("example_session_templates_spark_connect", new()
        {
            Name = "projects/my-project-name/locations/us-central1/sessionTemplates/sc-session-template",
            Location = "us-central1",
            Labels = 
            {
                { "session_template_test", "terraform" },
            },
            RuntimeConfig = new Gcp.Dataproc.Inputs.SessionTemplateRuntimeConfigArgs
            {
                Properties = 
                {
                    { "spark.dynamicAllocation.enabled", "false" },
                    { "spark.executor.instances", "2" },
                },
            },
            EnvironmentConfig = new Gcp.Dataproc.Inputs.SessionTemplateEnvironmentConfigArgs
            {
                ExecutionConfig = new Gcp.Dataproc.Inputs.SessionTemplateEnvironmentConfigExecutionConfigArgs
                {
                    SubnetworkUri = "default",
                    Ttl = "3600s",
                    NetworkTags = new[]
                    {
                        "tag1",
                    },
                },
            },
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.gcp.dataproc.SessionTemplate;
    import com.pulumi.gcp.dataproc.SessionTemplateArgs;
    import com.pulumi.gcp.dataproc.inputs.SessionTemplateRuntimeConfigArgs;
    import com.pulumi.gcp.dataproc.inputs.SessionTemplateEnvironmentConfigArgs;
    import com.pulumi.gcp.dataproc.inputs.SessionTemplateEnvironmentConfigExecutionConfigArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var exampleSessionTemplatesSparkConnect = new SessionTemplate("exampleSessionTemplatesSparkConnect", SessionTemplateArgs.builder()
                .name("projects/my-project-name/locations/us-central1/sessionTemplates/sc-session-template")
                .location("us-central1")
                .labels(Map.of("session_template_test", "terraform"))
                .runtimeConfig(SessionTemplateRuntimeConfigArgs.builder()
                    .properties(Map.ofEntries(
                        Map.entry("spark.dynamicAllocation.enabled", "false"),
                        Map.entry("spark.executor.instances", "2")
                    ))
                    .build())
                .environmentConfig(SessionTemplateEnvironmentConfigArgs.builder()
                    .executionConfig(SessionTemplateEnvironmentConfigExecutionConfigArgs.builder()
                        .subnetworkUri("default")
                        .ttl("3600s")
                        .networkTags("tag1")
                        .build())
                    .build())
                .build());
    
        }
    }
    
    resources:
      exampleSessionTemplatesSparkConnect:
        type: gcp:dataproc:SessionTemplate
        name: example_session_templates_spark_connect
        properties:
          name: projects/my-project-name/locations/us-central1/sessionTemplates/sc-session-template
          location: us-central1
          labels:
            session_template_test: terraform
          runtimeConfig:
            properties:
              spark.dynamicAllocation.enabled: 'false'
              spark.executor.instances: '2'
          environmentConfig:
            executionConfig:
              subnetworkUri: default
              ttl: 3600s
              networkTags:
                - tag1
    

    Create SessionTemplate Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new SessionTemplate(name: string, args?: SessionTemplateArgs, opts?: CustomResourceOptions);
    @overload
    def SessionTemplate(resource_name: str,
                        args: Optional[SessionTemplateArgs] = None,
                        opts: Optional[ResourceOptions] = None)
    
    @overload
    def SessionTemplate(resource_name: str,
                        opts: Optional[ResourceOptions] = None,
                        environment_config: Optional[SessionTemplateEnvironmentConfigArgs] = None,
                        jupyter_session: Optional[SessionTemplateJupyterSessionArgs] = None,
                        labels: Optional[Mapping[str, str]] = None,
                        location: Optional[str] = None,
                        name: Optional[str] = None,
                        project: Optional[str] = None,
                        runtime_config: Optional[SessionTemplateRuntimeConfigArgs] = None,
                        spark_connect_session: Optional[SessionTemplateSparkConnectSessionArgs] = None)
    func NewSessionTemplate(ctx *Context, name string, args *SessionTemplateArgs, opts ...ResourceOption) (*SessionTemplate, error)
    public SessionTemplate(string name, SessionTemplateArgs? args = null, CustomResourceOptions? opts = null)
    public SessionTemplate(String name, SessionTemplateArgs args)
    public SessionTemplate(String name, SessionTemplateArgs args, CustomResourceOptions options)
    
    type: gcp:dataproc:SessionTemplate
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args SessionTemplateArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args SessionTemplateArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args SessionTemplateArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args SessionTemplateArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args SessionTemplateArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.
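
    For instance, a minimal Python sketch combining all three parameters in a single call (the resource name and the option value here are hypothetical):

    import pulumi
    import pulumi_gcp as gcp

    # name, inline args (keyword form), and opts in one constructor call
    template = gcp.dataproc.SessionTemplate("my-template",
        location="us-central1",
        opts=pulumi.ResourceOptions(protect=True))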

    Constructor example

    The following reference example uses placeholder values for all input properties.

    var sessionTemplateResource = new Gcp.Dataproc.SessionTemplate("sessionTemplateResource", new()
    {
        EnvironmentConfig = new Gcp.Dataproc.Inputs.SessionTemplateEnvironmentConfigArgs
        {
            ExecutionConfig = new Gcp.Dataproc.Inputs.SessionTemplateEnvironmentConfigExecutionConfigArgs
            {
                KmsKey = "string",
                NetworkTags = new[]
                {
                    "string",
                },
                ServiceAccount = "string",
                StagingBucket = "string",
                SubnetworkUri = "string",
                Ttl = "string",
            },
            PeripheralsConfig = new Gcp.Dataproc.Inputs.SessionTemplateEnvironmentConfigPeripheralsConfigArgs
            {
                MetastoreService = "string",
                SparkHistoryServerConfig = new Gcp.Dataproc.Inputs.SessionTemplateEnvironmentConfigPeripheralsConfigSparkHistoryServerConfigArgs
                {
                    DataprocCluster = "string",
                },
            },
        },
        JupyterSession = new Gcp.Dataproc.Inputs.SessionTemplateJupyterSessionArgs
        {
            DisplayName = "string",
            Kernel = "string",
        },
        Labels = 
        {
            { "string", "string" },
        },
        Location = "string",
        Name = "string",
        Project = "string",
        RuntimeConfig = new Gcp.Dataproc.Inputs.SessionTemplateRuntimeConfigArgs
        {
            ContainerImage = "string",
            EffectiveProperties = 
            {
                { "string", "string" },
            },
            Properties = 
            {
                { "string", "string" },
            },
            Version = "string",
        },
        SparkConnectSession = null,
    });
    
    example, err := dataproc.NewSessionTemplate(ctx, "sessionTemplateResource", &dataproc.SessionTemplateArgs{
    	EnvironmentConfig: &dataproc.SessionTemplateEnvironmentConfigArgs{
    		ExecutionConfig: &dataproc.SessionTemplateEnvironmentConfigExecutionConfigArgs{
    			KmsKey: pulumi.String("string"),
    			NetworkTags: pulumi.StringArray{
    				pulumi.String("string"),
    			},
    			ServiceAccount: pulumi.String("string"),
    			StagingBucket:  pulumi.String("string"),
    			SubnetworkUri:  pulumi.String("string"),
    			Ttl:            pulumi.String("string"),
    		},
    		PeripheralsConfig: &dataproc.SessionTemplateEnvironmentConfigPeripheralsConfigArgs{
    			MetastoreService: pulumi.String("string"),
    			SparkHistoryServerConfig: &dataproc.SessionTemplateEnvironmentConfigPeripheralsConfigSparkHistoryServerConfigArgs{
    				DataprocCluster: pulumi.String("string"),
    			},
    		},
    	},
    	JupyterSession: &dataproc.SessionTemplateJupyterSessionArgs{
    		DisplayName: pulumi.String("string"),
    		Kernel:      pulumi.String("string"),
    	},
    	Labels: pulumi.StringMap{
    		"string": pulumi.String("string"),
    	},
    	Location: pulumi.String("string"),
    	Name:     pulumi.String("string"),
    	Project:  pulumi.String("string"),
    	RuntimeConfig: &dataproc.SessionTemplateRuntimeConfigArgs{
    		ContainerImage: pulumi.String("string"),
    		EffectiveProperties: pulumi.StringMap{
    			"string": pulumi.String("string"),
    		},
    		Properties: pulumi.StringMap{
    			"string": pulumi.String("string"),
    		},
    		Version: pulumi.String("string"),
    	},
    	SparkConnectSession: &dataproc.SessionTemplateSparkConnectSessionArgs{},
    })
    
    var sessionTemplateResource = new SessionTemplate("sessionTemplateResource", SessionTemplateArgs.builder()
        .environmentConfig(SessionTemplateEnvironmentConfigArgs.builder()
            .executionConfig(SessionTemplateEnvironmentConfigExecutionConfigArgs.builder()
                .kmsKey("string")
                .networkTags("string")
                .serviceAccount("string")
                .stagingBucket("string")
                .subnetworkUri("string")
                .ttl("string")
                .build())
            .peripheralsConfig(SessionTemplateEnvironmentConfigPeripheralsConfigArgs.builder()
                .metastoreService("string")
                .sparkHistoryServerConfig(SessionTemplateEnvironmentConfigPeripheralsConfigSparkHistoryServerConfigArgs.builder()
                    .dataprocCluster("string")
                    .build())
                .build())
            .build())
        .jupyterSession(SessionTemplateJupyterSessionArgs.builder()
            .displayName("string")
            .kernel("string")
            .build())
        .labels(Map.of("string", "string"))
        .location("string")
        .name("string")
        .project("string")
        .runtimeConfig(SessionTemplateRuntimeConfigArgs.builder()
            .containerImage("string")
            .effectiveProperties(Map.of("string", "string"))
            .properties(Map.of("string", "string"))
            .version("string")
            .build())
        .sparkConnectSession(SessionTemplateSparkConnectSessionArgs.builder()
            .build())
        .build());
    
    session_template_resource = gcp.dataproc.SessionTemplate("sessionTemplateResource",
        environment_config={
            "execution_config": {
                "kms_key": "string",
                "network_tags": ["string"],
                "service_account": "string",
                "staging_bucket": "string",
                "subnetwork_uri": "string",
                "ttl": "string",
            },
            "peripherals_config": {
                "metastore_service": "string",
                "spark_history_server_config": {
                    "dataproc_cluster": "string",
                },
            },
        },
        jupyter_session={
            "display_name": "string",
            "kernel": "string",
        },
        labels={
            "string": "string",
        },
        location="string",
        name="string",
        project="string",
        runtime_config={
            "container_image": "string",
            "effective_properties": {
                "string": "string",
            },
            "properties": {
                "string": "string",
            },
            "version": "string",
        },
        spark_connect_session={})
    
    const sessionTemplateResource = new gcp.dataproc.SessionTemplate("sessionTemplateResource", {
        environmentConfig: {
            executionConfig: {
                kmsKey: "string",
                networkTags: ["string"],
                serviceAccount: "string",
                stagingBucket: "string",
                subnetworkUri: "string",
                ttl: "string",
            },
            peripheralsConfig: {
                metastoreService: "string",
                sparkHistoryServerConfig: {
                    dataprocCluster: "string",
                },
            },
        },
        jupyterSession: {
            displayName: "string",
            kernel: "string",
        },
        labels: {
            string: "string",
        },
        location: "string",
        name: "string",
        project: "string",
        runtimeConfig: {
            containerImage: "string",
            effectiveProperties: {
                string: "string",
            },
            properties: {
                string: "string",
            },
            version: "string",
        },
        sparkConnectSession: {},
    });
    
    type: gcp:dataproc:SessionTemplate
    properties:
        environmentConfig:
            executionConfig:
                kmsKey: string
                networkTags:
                    - string
                serviceAccount: string
                stagingBucket: string
                subnetworkUri: string
                ttl: string
            peripheralsConfig:
                metastoreService: string
                sparkHistoryServerConfig:
                    dataprocCluster: string
        jupyterSession:
            displayName: string
            kernel: string
        labels:
            string: string
        location: string
        name: string
        project: string
        runtimeConfig:
            containerImage: string
            effectiveProperties:
                string: string
            properties:
                string: string
            version: string
        sparkConnectSession: {}
    

    SessionTemplate Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
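
    For example (a minimal sketch using the SessionTemplateRuntimeConfigArgs class documented below), the same runtime configuration can be written in either form:

    import pulumi_gcp as gcp

    # As a typed argument class:
    runtime_config = gcp.dataproc.SessionTemplateRuntimeConfigArgs(
        properties={"spark.executor.instances": "2"},
    )

    # Or as an equivalent dictionary literal:
    runtime_config = {
        "properties": {"spark.executor.instances": "2"},
    }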

    The SessionTemplate resource accepts the following input properties:

    EnvironmentConfig SessionTemplateEnvironmentConfig
    Environment configuration for the session execution. Structure is documented below.
    JupyterSession SessionTemplateJupyterSession
    Jupyter configuration for an interactive session. Structure is documented below.
    Labels Dictionary<string, string>

    The labels to associate with this session template.

    Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

    Location string
    The location in which the session template will be created.
    Name string
    The resource name of the session template in the following format: projects/{project}/locations/{location}/sessionTemplates/{template_id}


    Project string
    The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
    RuntimeConfig SessionTemplateRuntimeConfig
    Runtime configuration for the session template. Structure is documented below.
    SparkConnectSession SessionTemplateSparkConnectSession
    Spark Connect configuration for an interactive session.
    EnvironmentConfig SessionTemplateEnvironmentConfigArgs
    Environment configuration for the session execution. Structure is documented below.
    JupyterSession SessionTemplateJupyterSessionArgs
    Jupyter configuration for an interactive session. Structure is documented below.
    Labels map[string]string

    The labels to associate with this session template.

    Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

    Location string
    The location in which the session template will be created.
    Name string
    The resource name of the session template in the following format: projects/{project}/locations/{location}/sessionTemplates/{template_id}


    Project string
    The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
    RuntimeConfig SessionTemplateRuntimeConfigArgs
    Runtime configuration for the session template. Structure is documented below.
    SparkConnectSession SessionTemplateSparkConnectSessionArgs
    Spark Connect configuration for an interactive session.
    environmentConfig SessionTemplateEnvironmentConfig
    Environment configuration for the session execution. Structure is documented below.
    jupyterSession SessionTemplateJupyterSession
    Jupyter configuration for an interactive session. Structure is documented below.
    labels Map<String,String>

    The labels to associate with this session template.

    Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

    location String
    The location in which the session template will be created.
    name String
    The resource name of the session template in the following format: projects/{project}/locations/{location}/sessionTemplates/{template_id}


    project String
    The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
    runtimeConfig SessionTemplateRuntimeConfig
    Runtime configuration for the session template. Structure is documented below.
    sparkConnectSession SessionTemplateSparkConnectSession
    Spark Connect configuration for an interactive session.
    environmentConfig SessionTemplateEnvironmentConfig
    Environment configuration for the session execution. Structure is documented below.
    jupyterSession SessionTemplateJupyterSession
    Jupyter configuration for an interactive session. Structure is documented below.
    labels {[key: string]: string}

    The labels to associate with this session template.

    Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

    location string
    The location in which the session template will be created.
    name string
    The resource name of the session template in the following format: projects/{project}/locations/{location}/sessionTemplates/{template_id}


    project string
    The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
    runtimeConfig SessionTemplateRuntimeConfig
    Runtime configuration for the session template. Structure is documented below.
    sparkConnectSession SessionTemplateSparkConnectSession
    Spark Connect configuration for an interactive session.
    environment_config SessionTemplateEnvironmentConfigArgs
    Environment configuration for the session execution. Structure is documented below.
    jupyter_session SessionTemplateJupyterSessionArgs
    Jupyter configuration for an interactive session. Structure is documented below.
    labels Mapping[str, str]

    The labels to associate with this session template.

    Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

    location str
    The location in which the session template will be created.
    name str
    The resource name of the session template in the following format: projects/{project}/locations/{location}/sessionTemplates/{template_id}


    project str
    The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
    runtime_config SessionTemplateRuntimeConfigArgs
    Runtime configuration for the session template. Structure is documented below.
    spark_connect_session SessionTemplateSparkConnectSessionArgs
    Spark Connect configuration for an interactive session.
    environmentConfig Property Map
    Environment configuration for the session execution. Structure is documented below.
    jupyterSession Property Map
    Jupyter configuration for an interactive session. Structure is documented below.
    labels Map<String>

    The labels to associate with this session template.

    Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

    location String
    The location in which the session template will be created.
    name String
    The resource name of the session template in the following format: projects/{project}/locations/{location}/sessionTemplates/{template_id}


    project String
    The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
    runtimeConfig Property Map
    Runtime configuration for the session template. Structure is documented below.
    sparkConnectSession Property Map
    Spark Connect configuration for an interactive session.

    Outputs

    All input properties are implicitly available as output properties. Additionally, the SessionTemplate resource produces the following output properties:

    CreateTime string
    The time when the session template was created.
    Creator string
    The email address of the user who created the session template.
    EffectiveLabels Dictionary<string, string>
    All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
    Id string
    The provider-assigned unique ID for this managed resource.
    PulumiLabels Dictionary<string, string>
    The combination of labels configured directly on the resource and default labels configured on the provider.
    UpdateTime string
    The time when the session template was updated.
    Uuid string
    A session template UUID (Universally Unique Identifier). The service generates this value when it creates the session template.
    CreateTime string
    The time when the session template was created.
    Creator string
    The email address of the user who created the session template.
    EffectiveLabels map[string]string
    All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
    Id string
    The provider-assigned unique ID for this managed resource.
    PulumiLabels map[string]string
    The combination of labels configured directly on the resource and default labels configured on the provider.
    UpdateTime string
    The time when the session template was updated.
    Uuid string
    A session template UUID (Universally Unique Identifier). The service generates this value when it creates the session template.
    createTime String
    The time when the session template was created.
    creator String
    The email address of the user who created the session template.
    effectiveLabels Map<String,String>
    All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
    id String
    The provider-assigned unique ID for this managed resource.
    pulumiLabels Map<String,String>
    The combination of labels configured directly on the resource and default labels configured on the provider.
    updateTime String
    The time when the session template was updated.
    uuid String
    A session template UUID (Universally Unique Identifier). The service generates this value when it creates the session template.
    createTime string
    The time when the session template was created.
    creator string
    The email address of the user who created the session template.
    effectiveLabels {[key: string]: string}
    All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
    id string
    The provider-assigned unique ID for this managed resource.
    pulumiLabels {[key: string]: string}
    The combination of labels configured directly on the resource and default labels configured on the provider.
    updateTime string
    The time when the session template was updated.
    uuid string
    A session template UUID (Universally Unique Identifier). The service generates this value when it creates the session template.
    create_time str
    The time when the session template was created.
    creator str
    The email address of the user who created the session template.
    effective_labels Mapping[str, str]
    All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
    id str
    The provider-assigned unique ID for this managed resource.
    pulumi_labels Mapping[str, str]
    The combination of labels configured directly on the resource and default labels configured on the provider.
    update_time str
    The time when the session template was updated.
    uuid str
    A session template UUID (Universally Unique Identifier). The service generates this value when it creates the session template.
    createTime String
    The time when the session template was created.
    creator String
    The email address of the user who created the session template.
    effectiveLabels Map<String>
    All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
    id String
    The provider-assigned unique ID for this managed resource.
    pulumiLabels Map<String>
    The combination of labels configured directly on the resource and default labels configured on the provider.
    updateTime String
    The time when the session template was updated.
    uuid String
    A session template UUID (Universally Unique Identifier). The service generates this value when it creates the session template.
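
    These outputs can be read like any other Pulumi outputs. As a minimal sketch, assuming a SessionTemplate named template is defined elsewhere in the program:

    import pulumi

    # Export the server-generated identifiers of the session template:
    pulumi.export("template_uuid", template.uuid)
    pulumi.export("template_create_time", template.create_time)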

    Look up Existing SessionTemplate Resource

    Get an existing SessionTemplate resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: SessionTemplateState, opts?: CustomResourceOptions): SessionTemplate
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            create_time: Optional[str] = None,
            creator: Optional[str] = None,
            effective_labels: Optional[Mapping[str, str]] = None,
            environment_config: Optional[SessionTemplateEnvironmentConfigArgs] = None,
            jupyter_session: Optional[SessionTemplateJupyterSessionArgs] = None,
            labels: Optional[Mapping[str, str]] = None,
            location: Optional[str] = None,
            name: Optional[str] = None,
            project: Optional[str] = None,
            pulumi_labels: Optional[Mapping[str, str]] = None,
            runtime_config: Optional[SessionTemplateRuntimeConfigArgs] = None,
            spark_connect_session: Optional[SessionTemplateSparkConnectSessionArgs] = None,
            update_time: Optional[str] = None,
            uuid: Optional[str] = None) -> SessionTemplate
    func GetSessionTemplate(ctx *Context, name string, id IDInput, state *SessionTemplateState, opts ...ResourceOption) (*SessionTemplate, error)
    public static SessionTemplate Get(string name, Input<string> id, SessionTemplateState? state, CustomResourceOptions? opts = null)
    public static SessionTemplate get(String name, Output<String> id, SessionTemplateState state, CustomResourceOptions options)
    resources:
      _:
        type: gcp:dataproc:SessionTemplate
        get:
          id: ${id}
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
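
    As a sketch of the lookup in Python, an existing template can be adopted by ID; the ID below is a hypothetical placeholder in the same format as the resource name:

    import pulumi_gcp as gcp

    existing = gcp.dataproc.SessionTemplate.get(
        "existing-template",
        "projects/my-project-name/locations/us-central1/sessionTemplates/jupyter-session-template",
    )
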
    The following state arguments are supported:
    CreateTime string
    The time when the session template was created.
    Creator string
    The email address of the user who created the session template.
    EffectiveLabels Dictionary<string, string>
    All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
    EnvironmentConfig SessionTemplateEnvironmentConfig
    Environment configuration for the session execution. Structure is documented below.
    JupyterSession SessionTemplateJupyterSession
    Jupyter configuration for an interactive session. Structure is documented below.
    Labels Dictionary<string, string>

    The labels to associate with this session template.

    Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

    Location string
    The location in which the session template will be created.
    Name string
    The resource name of the session template in the following format: projects/{project}/locations/{location}/sessionTemplates/{template_id}


    Project string
    The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
    PulumiLabels Dictionary<string, string>
    The combination of labels configured directly on the resource and default labels configured on the provider.
    RuntimeConfig SessionTemplateRuntimeConfig
    Runtime configuration for the session template. Structure is documented below.
    SparkConnectSession SessionTemplateSparkConnectSession
    Spark Connect configuration for an interactive session.
    UpdateTime string
    The time when the session template was updated.
    Uuid string
    A session template UUID (Universally Unique Identifier). The service generates this value when it creates the session template.
    CreateTime string
    The time when the session template was created.
    Creator string
    The email address of the user who created the session template.
    EffectiveLabels map[string]string
    All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
    EnvironmentConfig SessionTemplateEnvironmentConfigArgs
    Environment configuration for the session execution. Structure is documented below.
    JupyterSession SessionTemplateJupyterSessionArgs
    Jupyter configuration for an interactive session. Structure is documented below.
    Labels map[string]string

    The labels to associate with this session template.

    Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

    Location string
    The location in which the session template will be created.
    Name string
    The resource name of the session template in the following format: projects/{project}/locations/{location}/sessionTemplates/{template_id}


    Project string
    The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
    PulumiLabels map[string]string
    The combination of labels configured directly on the resource and default labels configured on the provider.
    RuntimeConfig SessionTemplateRuntimeConfigArgs
    Runtime configuration for the session template. Structure is documented below.
    SparkConnectSession SessionTemplateSparkConnectSessionArgs
    Spark Connect configuration for an interactive session.
    UpdateTime string
    The time when the session template was updated.
    Uuid string
    A session template UUID (Universally Unique Identifier). The service generates this value when it creates the session template.
    createTime String
    The time when the session template was created.
    creator String
    The email address of the user who created the session template.
    effectiveLabels Map<String,String>
    All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
    environmentConfig SessionTemplateEnvironmentConfig
    Environment configuration for the session execution. Structure is documented below.
    jupyterSession SessionTemplateJupyterSession
    Jupyter configuration for an interactive session. Structure is documented below.
    labels Map<String,String>

    The labels to associate with this session template.

    Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

    location String
    The location in which the session template will be created.
    name String
    The resource name of the session template in the following format: projects/{project}/locations/{location}/sessionTemplates/{template_id}


    project String
    The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
    pulumiLabels Map<String,String>
    The combination of labels configured directly on the resource and default labels configured on the provider.
    runtimeConfig SessionTemplateRuntimeConfig
    Runtime configuration for the session template. Structure is documented below.
    sparkConnectSession SessionTemplateSparkConnectSession
    Spark Connect configuration for an interactive session.
    updateTime String
    The time when the session template was updated.
    uuid String
    A session template UUID (Universally Unique Identifier). The service generates this value when it creates the session template.
    createTime string
    The time when the session template was created.
    creator string
    The email address of the user who created the session template.
    effectiveLabels {[key: string]: string}
    All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
    environmentConfig SessionTemplateEnvironmentConfig
    Environment configuration for the session execution. Structure is documented below.
    jupyterSession SessionTemplateJupyterSession
    Jupyter configuration for an interactive session. Structure is documented below.
    labels {[key: string]: string}

    The labels to associate with this session template.

    Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

    location string
    The location in which the session template will be created.
    name string
    The resource name of the session template in the following format: projects/{project}/locations/{location}/sessionTemplates/{template_id}


    project string
    The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
    pulumiLabels {[key: string]: string}
    The combination of labels configured directly on the resource and default labels configured on the provider.
    runtimeConfig SessionTemplateRuntimeConfig
    Runtime configuration for the session template. Structure is documented below.
    sparkConnectSession SessionTemplateSparkConnectSession
    Spark Connect configuration for an interactive session.
    updateTime string
    The time when the session template was updated.
    uuid string
    A session template UUID (Universally Unique Identifier). The service generates this value when it creates the session template.
    create_time str
    The time when the session template was created.
    creator str
    The email address of the user who created the session template.
    effective_labels Mapping[str, str]
    All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
    environment_config SessionTemplateEnvironmentConfigArgs
    Environment configuration for the session execution. Structure is documented below.
    jupyter_session SessionTemplateJupyterSessionArgs
    Jupyter configuration for an interactive session. Structure is documented below.
    labels Mapping[str, str]

    The labels to associate with this session template.

    Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

    location str
    The location in which the session template will be created.
    name str
    The resource name of the session template in the following format: projects/{project}/locations/{location}/sessionTemplates/{template_id}


    project str
    The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
    pulumi_labels Mapping[str, str]
    The combination of labels configured directly on the resource and default labels configured on the provider.
    runtime_config SessionTemplateRuntimeConfigArgs
    Runtime configuration for the session template. Structure is documented below.
    spark_connect_session SessionTemplateSparkConnectSessionArgs
    Spark Connect configuration for an interactive session.
    update_time str
    The time when the session template was updated.
    uuid str
    A session template UUID (Universally Unique Identifier). The service generates this value when it creates the session template.
    createTime String
    The time when the session template was created.
    creator String
    The email address of the user who created the session template.
    effectiveLabels Map<String>
    All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
    environmentConfig Property Map
    Environment configuration for the session execution. Structure is documented below.
    jupyterSession Property Map
    Jupyter configuration for an interactive session. Structure is documented below.
    labels Map<String>

    The labels to associate with this session template.

    Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.

    location String
    The location in which the session template will be created.
    name String
    The resource name of the session template in the following format: projects/{project}/locations/{location}/sessionTemplates/{template_id}


    project String
    The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
    pulumiLabels Map<String>
    The combination of labels configured directly on the resource and default labels configured on the provider.
    runtimeConfig Property Map
    Runtime configuration for the session template. Structure is documented below.
    sparkConnectSession Property Map
    Spark Connect configuration for an interactive session.
    updateTime String
    The time when the session template was updated.
    uuid String
    A session template UUID (Universally Unique Identifier). The service generates this value when it creates the session template.

    Supporting Types

    SessionTemplateEnvironmentConfig, SessionTemplateEnvironmentConfigArgs

    ExecutionConfig SessionTemplateEnvironmentConfigExecutionConfig
    Execution configuration for a workload. Structure is documented below.
    PeripheralsConfig SessionTemplateEnvironmentConfigPeripheralsConfig
    Peripherals configuration that the workload has access to. Structure is documented below.
    ExecutionConfig SessionTemplateEnvironmentConfigExecutionConfig
    Execution configuration for a workload. Structure is documented below.
    PeripheralsConfig SessionTemplateEnvironmentConfigPeripheralsConfig
    Peripherals configuration that the workload has access to. Structure is documented below.
    executionConfig SessionTemplateEnvironmentConfigExecutionConfig
    Execution configuration for a workload. Structure is documented below.
    peripheralsConfig SessionTemplateEnvironmentConfigPeripheralsConfig
    Peripherals configuration that the workload has access to. Structure is documented below.
    executionConfig SessionTemplateEnvironmentConfigExecutionConfig
    Execution configuration for a workload. Structure is documented below.
    peripheralsConfig SessionTemplateEnvironmentConfigPeripheralsConfig
    Peripherals configuration that the workload has access to. Structure is documented below.
    execution_config SessionTemplateEnvironmentConfigExecutionConfig
    Execution configuration for a workload. Structure is documented below.
    peripherals_config SessionTemplateEnvironmentConfigPeripheralsConfig
    Peripherals configuration that the workload has access to. Structure is documented below.
    executionConfig Property Map
    Execution configuration for a workload. Structure is documented below.
    peripheralsConfig Property Map
    Peripherals configuration that the workload has access to. Structure is documented below.

    SessionTemplateEnvironmentConfigExecutionConfig, SessionTemplateEnvironmentConfigExecutionConfigArgs

    KmsKey string
    The Cloud KMS key to use for encryption.
    NetworkTags List<string>
    Tags used for network traffic control.
    ServiceAccount string
    Service account used to execute the workload.
    StagingBucket string
    A Cloud Storage bucket used to stage workload dependencies and config files, and to store workload output and other ephemeral data, such as Spark history files. If you do not specify a staging bucket, Cloud Dataproc will determine a Cloud Storage location according to the region where your workload is running, and then create and manage project-level, per-location staging and temporary buckets. This field requires a Cloud Storage bucket name, not a gs://... URI to a Cloud Storage bucket.
    SubnetworkUri string
    Subnetwork configuration for workload execution.
    Ttl string
    The duration after which the workload will be terminated. When the workload exceeds this duration, it will be unconditionally terminated without waiting for ongoing work to finish. If ttl is not specified for a batch workload, the workload will be allowed to run until it exits naturally (or run forever without exiting). If ttl is not specified for an interactive session, it defaults to 24 hours. If ttl is not specified for a batch that uses a 2.1+ runtime version, it defaults to 4 hours. Minimum value is 10 minutes; maximum value is 14 days. If both ttl and idleTtl are specified (for an interactive session), the conditions are treated as OR conditions: the workload will be terminated when it has been idle for idleTtl or when ttl has been exceeded, whichever occurs first.
    KmsKey string
    The Cloud KMS key to use for encryption.
    NetworkTags []string
    Tags used for network traffic control.
    ServiceAccount string
    Service account used to execute the workload.
    StagingBucket string
    A Cloud Storage bucket used to stage workload dependencies and config files, and to store workload output and other ephemeral data, such as Spark history files. If you do not specify a staging bucket, Cloud Dataproc will determine a Cloud Storage location according to the region where your workload is running, and then create and manage project-level, per-location staging and temporary buckets. This field requires a Cloud Storage bucket name, not a gs://... URI to a Cloud Storage bucket.
    SubnetworkUri string
    Subnetwork configuration for workload execution.
    Ttl string
    The duration after which the workload will be terminated. When the workload exceeds this duration, it will be unconditionally terminated without waiting for ongoing work to finish. If ttl is not specified for a batch workload, the workload will be allowed to run until it exits naturally (or run forever without exiting). If ttl is not specified for an interactive session, it defaults to 24 hours. If ttl is not specified for a batch that uses a 2.1+ runtime version, it defaults to 4 hours. Minimum value is 10 minutes; maximum value is 14 days. If both ttl and idleTtl are specified (for an interactive session), the conditions are treated as OR conditions: the workload will be terminated when it has been idle for idleTtl or when ttl has been exceeded, whichever occurs first.
    kmsKey String
    The Cloud KMS key to use for encryption.
    networkTags List<String>
    Tags used for network traffic control.
    serviceAccount String
    Service account used to execute the workload.
    stagingBucket String
    A Cloud Storage bucket used to stage workload dependencies and config files, and to store workload output and other ephemeral data, such as Spark history files. If you do not specify a staging bucket, Cloud Dataproc will determine a Cloud Storage location according to the region where your workload is running, and then create and manage project-level, per-location staging and temporary buckets. This field requires a Cloud Storage bucket name, not a gs://... URI to a Cloud Storage bucket.
    subnetworkUri String
    Subnetwork configuration for workload execution.
    ttl String
    The duration after which the workload will be terminated. When the workload exceeds this duration, it will be unconditionally terminated without waiting for ongoing work to finish. If ttl is not specified for a batch workload, the workload will be allowed to run until it exits naturally (or run forever without exiting). If ttl is not specified for an interactive session, it defaults to 24 hours. If ttl is not specified for a batch that uses a 2.1+ runtime version, it defaults to 4 hours. Minimum value is 10 minutes; maximum value is 14 days. If both ttl and idleTtl are specified (for an interactive session), the conditions are treated as OR conditions: the workload will be terminated when it has been idle for idleTtl or when ttl has been exceeded, whichever occurs first.
    kmsKey string
    The Cloud KMS key to use for encryption.
    networkTags string[]
    Tags used for network traffic control.
    serviceAccount string
    Service account used to execute the workload.
    stagingBucket string
    A Cloud Storage bucket used to stage workload dependencies and config files, and to store workload output and other ephemeral data, such as Spark history files. If you do not specify a staging bucket, Cloud Dataproc will determine a Cloud Storage location according to the region where your workload is running, and then create and manage project-level, per-location staging and temporary buckets. This field requires a Cloud Storage bucket name, not a gs://... URI to a Cloud Storage bucket.
    subnetworkUri string
    Subnetwork configuration for workload execution.
    ttl string
    The duration after which the workload will be terminated. When the workload exceeds this duration, it will be unconditionally terminated without waiting for ongoing work to finish. If ttl is not specified for a batch workload, the workload will be allowed to run until it exits naturally (or run forever without exiting). If ttl is not specified for an interactive session, it defaults to 24 hours. If ttl is not specified for a batch that uses a 2.1+ runtime version, it defaults to 4 hours. Minimum value is 10 minutes; maximum value is 14 days. If both ttl and idleTtl are specified (for an interactive session), the conditions are treated as OR conditions: the workload will be terminated when it has been idle for idleTtl or when ttl has been exceeded, whichever occurs first.
    kms_key str
    The Cloud KMS key to use for encryption.
    network_tags Sequence[str]
    Tags used for network traffic control.
    service_account str
    Service account used to execute the workload.
    staging_bucket str
    A Cloud Storage bucket used to stage workload dependencies and config files, and to store workload output and other ephemeral data, such as Spark history files. If you do not specify a staging bucket, Cloud Dataproc will determine a Cloud Storage location according to the region where your workload is running, and then create and manage project-level, per-location staging and temporary buckets. This field requires a Cloud Storage bucket name, not a gs://... URI to a Cloud Storage bucket.
    subnetwork_uri str
    Subnetwork configuration for workload execution.
    ttl str
    The duration after which the workload will be terminated. When the workload exceeds this duration, it will be unconditionally terminated without waiting for ongoing work to finish. If ttl is not specified for a batch workload, the workload will be allowed to run until it exits naturally (or run forever without exiting). If ttl is not specified for an interactive session, it defaults to 24 hours. If ttl is not specified for a batch that uses a 2.1+ runtime version, it defaults to 4 hours. Minimum value is 10 minutes; maximum value is 14 days. If both ttl and idleTtl are specified (for an interactive session), the conditions are treated as OR conditions: the workload will be terminated when it has been idle for idleTtl or when ttl has been exceeded, whichever occurs first.
    kmsKey String
    The Cloud KMS key to use for encryption.
    networkTags List<String>
    Tags used for network traffic control.
    serviceAccount String
    Service account used to execute the workload.
    stagingBucket String
    A Cloud Storage bucket used to stage workload dependencies and config files, and to store workload output and other ephemeral data, such as Spark history files. If you do not specify a staging bucket, Cloud Dataproc will determine a Cloud Storage location according to the region where your workload is running, and then create and manage project-level, per-location staging and temporary buckets. This field requires a Cloud Storage bucket name, not a gs://... URI to a Cloud Storage bucket.
    subnetworkUri String
    Subnetwork configuration for workload execution.
    ttl String
    The duration after which the workload will be terminated. When the workload exceeds this duration, it will be unconditionally terminated without waiting for ongoing work to finish. If ttl is not specified for a batch workload, the workload will be allowed to run until it exits naturally (or run forever without exiting). If ttl is not specified for an interactive session, it defaults to 24 hours. If ttl is not specified for a batch that uses a 2.1+ runtime version, it defaults to 4 hours. Minimum value is 10 minutes; maximum value is 14 days. If both ttl and idleTtl are specified (for an interactive session), the conditions are treated as OR conditions: the workload will be terminated when it has been idle for idleTtl or when ttl has been exceeded, whichever occurs first.
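
    A minimal sketch tying these execution fields together (the bucket name is a placeholder; required top-level fields follow the usage example at the start of this page):

    execution_config = {
        "subnetwork_uri": "default",
        "staging_bucket": "my-staging-bucket",  # a bucket name, not a gs:// URI
        "ttl": "3600s",  # terminate the session unconditionally after one hour
    }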

    SessionTemplateEnvironmentConfigPeripheralsConfig, SessionTemplateEnvironmentConfigPeripheralsConfigArgs

    MetastoreService string
    Resource name of an existing Dataproc Metastore service.
    SparkHistoryServerConfig SessionTemplateEnvironmentConfigPeripheralsConfigSparkHistoryServerConfig
    The Spark History Server configuration for the workload. Structure is documented below.
    MetastoreService string
    Resource name of an existing Dataproc Metastore service.
    SparkHistoryServerConfig SessionTemplateEnvironmentConfigPeripheralsConfigSparkHistoryServerConfig
    The Spark History Server configuration for the workload. Structure is documented below.
    metastoreService String
    Resource name of an existing Dataproc Metastore service.
    sparkHistoryServerConfig SessionTemplateEnvironmentConfigPeripheralsConfigSparkHistoryServerConfig
    The Spark History Server configuration for the workload. Structure is documented below.
    metastoreService string
    Resource name of an existing Dataproc Metastore service.
    sparkHistoryServerConfig SessionTemplateEnvironmentConfigPeripheralsConfigSparkHistoryServerConfig
    The Spark History Server configuration for the workload. Structure is documented below.
    metastore_service str
    Resource name of an existing Dataproc Metastore service.
    spark_history_server_config SessionTemplateEnvironmentConfigPeripheralsConfigSparkHistoryServerConfig
    The Spark History Server configuration for the workload. Structure is documented below.
    metastoreService String
    Resource name of an existing Dataproc Metastore service.
    sparkHistoryServerConfig Property Map
    The Spark History Server configuration for the workload. Structure is documented below.
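
    A minimal sketch of the peripherals wiring, with placeholder resource names for the Metastore service and the history-server cluster:

    peripherals_config = {
        "metastore_service": "projects/my-project-name/locations/us-central1/services/my-metastore",
        "spark_history_server_config": {
            "dataproc_cluster": "projects/my-project-name/regions/us-central1/clusters/my-history-cluster",
        },
    }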

    SessionTemplateEnvironmentConfigPeripheralsConfigSparkHistoryServerConfig, SessionTemplateEnvironmentConfigPeripheralsConfigSparkHistoryServerConfigArgs

    DataprocCluster string
    Resource name of an existing Dataproc Cluster to act as a Spark History Server for the workload.
    DataprocCluster string
    Resource name of an existing Dataproc Cluster to act as a Spark History Server for the workload.
    dataprocCluster String
    Resource name of an existing Dataproc Cluster to act as a Spark History Server for the workload.
    dataprocCluster string
    Resource name of an existing Dataproc Cluster to act as a Spark History Server for the workload.
    dataproc_cluster str
    Resource name of an existing Dataproc Cluster to act as a Spark History Server for the workload.
    dataprocCluster String
    Resource name of an existing Dataproc Cluster to act as a Spark History Server for the workload.

    SessionTemplateJupyterSession, SessionTemplateJupyterSessionArgs

    DisplayName string
    Display name, shown in the Jupyter kernelspec card.
    Kernel string
    Kernel to be used with the Jupyter interactive session. Possible values are: PYTHON, SCALA.
    DisplayName string
    Display name, shown in the Jupyter kernelspec card.
    Kernel string
    Kernel to be used with the Jupyter interactive session. Possible values are: PYTHON, SCALA.
    displayName String
    Display name, shown in the Jupyter kernelspec card.
    kernel String
    Kernel to be used with the Jupyter interactive session. Possible values are: PYTHON, SCALA.
    displayName string
    Display name, shown in the Jupyter kernelspec card.
    kernel string
    Kernel to be used with the Jupyter interactive session. Possible values are: PYTHON, SCALA.
    display_name str
    Display name, shown in the Jupyter kernelspec card.
    kernel str
    Kernel to be used with the Jupyter interactive session. Possible values are: PYTHON, SCALA.
    displayName String
    Display name, shown in the Jupyter kernelspec card.
    kernel String
    Kernel to be used with the Jupyter interactive session. Possible values are: PYTHON, SCALA.
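
    For example, a Scala kernel could be requested as follows (the display name is an arbitrary placeholder):

    jupyter_session = {
        "kernel": "SCALA",  # must be one of PYTHON or SCALA
        "display_name": "scala kernel",
    }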

    SessionTemplateRuntimeConfig, SessionTemplateRuntimeConfigArgs

    ContainerImage string
    Optional custom container image for the job runtime environment. If not specified, a default container image will be used.
    EffectiveProperties Dictionary<string, string>
    (Output) A mapping of property names to values, which are used to configure workload execution.
    Properties Dictionary<string, string>
    A mapping of property names to values, which are used to configure workload execution.
    Version string
    Version of the session runtime.
    ContainerImage string
    Optional custom container image for the job runtime environment. If not specified, a default container image will be used.
    EffectiveProperties map[string]string
    (Output) A mapping of property names to values, which are used to configure workload execution.
    Properties map[string]string
    A mapping of property names to values, which are used to configure workload execution.
    Version string
    Version of the session runtime.
    containerImage String
    Optional custom container image for the job runtime environment. If not specified, a default container image will be used.
    effectiveProperties Map<String,String>
    (Output) A mapping of property names to values, which are used to configure workload execution.
    properties Map<String,String>
    A mapping of property names to values, which are used to configure workload execution.
    version String
    Version of the session runtime.
    containerImage string
    Optional custom container image for the job runtime environment. If not specified, a default container image will be used.
    effectiveProperties {[key: string]: string}
    (Output) A mapping of property names to values, which are used to configure workload execution.
    properties {[key: string]: string}
    A mapping of property names to values, which are used to configure workload execution.
    version string
    Version of the session runtime.
    container_image str
    Optional custom container image for the job runtime environment. If not specified, a default container image will be used.
    effective_properties Mapping[str, str]
    (Output) A mapping of property names to values, which are used to configure workload execution.
    properties Mapping[str, str]
    A mapping of property names to values, which are used to configure workload execution.
    version str
    Version of the session runtime.
    containerImage String
    Optional custom container image for the job runtime environment. If not specified, a default container image will be used.
    effectiveProperties Map<String>
    (Output) A mapping of property names to values, which are used to configure workload execution.
    properties Map<String>
    A mapping of property names to values, which are used to configure workload execution.
    version String
    Version of the session runtime.
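
    As a hedged sketch, a custom image and a pinned runtime version might be combined as follows; the image path and version value are placeholders, not defaults:

    runtime_config = {
        "container_image": "us-central1-docker.pkg.dev/my-project-name/my-repo/session-image:latest",
        "version": "2.2",  # placeholder runtime version
    }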

    Import

    SessionTemplate can be imported using any of these accepted formats:

    • {{name}}

    When using the pulumi import command, SessionTemplate can be imported using one of the formats above. For example:

    $ pulumi import gcp:dataproc/sessionTemplate:SessionTemplate default {{name}}
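
    For instance, using the resource name format shown earlier (with placeholder project and template IDs):

    $ pulumi import gcp:dataproc/sessionTemplate:SessionTemplate default projects/my-project-name/locations/us-central1/sessionTemplates/jupyter-session-template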
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    Google Cloud (GCP) Classic pulumi/pulumi-gcp
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the google-beta Terraform Provider.