gcp.dataproc.GdcSparkApplication
A Spark application is a single Spark workload run on a Google Distributed Cloud (GDC) cluster.
To get more information about SparkApplication, see:
- API documentation
- How-to Guides
Example Usage
Dataprocgdc Sparkapplication Basic
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const spark_application = new gcp.dataproc.GdcSparkApplication("spark-application", {
sparkApplicationId: "tf-e2e-spark-app-basic",
serviceinstance: "do-not-delete-dataproc-gdc-instance",
project: "my-project",
location: "us-west2",
namespace: "default",
sparkApplicationConfig: {
mainClass: "org.apache.spark.examples.SparkPi",
jarFileUris: ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
args: ["10000"],
},
});
import pulumi
import pulumi_gcp as gcp
spark_application = gcp.dataproc.GdcSparkApplication("spark-application",
spark_application_id="tf-e2e-spark-app-basic",
serviceinstance="do-not-delete-dataproc-gdc-instance",
project="my-project",
location="us-west2",
namespace="default",
spark_application_config={
"main_class": "org.apache.spark.examples.SparkPi",
"jar_file_uris": ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
"args": ["10000"],
})
package main
import (
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/dataproc"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := dataproc.NewGdcSparkApplication(ctx, "spark-application", &dataproc.GdcSparkApplicationArgs{
SparkApplicationId: pulumi.String("tf-e2e-spark-app-basic"),
Serviceinstance: pulumi.String("do-not-delete-dataproc-gdc-instance"),
Project: pulumi.String("my-project"),
Location: pulumi.String("us-west2"),
Namespace: pulumi.String("default"),
SparkApplicationConfig: &dataproc.GdcSparkApplicationSparkApplicationConfigArgs{
MainClass: pulumi.String("org.apache.spark.examples.SparkPi"),
JarFileUris: pulumi.StringArray{
pulumi.String("file:///usr/lib/spark/examples/jars/spark-examples.jar"),
},
Args: pulumi.StringArray{
pulumi.String("10000"),
},
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var spark_application = new Gcp.Dataproc.GdcSparkApplication("spark-application", new()
{
SparkApplicationId = "tf-e2e-spark-app-basic",
Serviceinstance = "do-not-delete-dataproc-gdc-instance",
Project = "my-project",
Location = "us-west2",
Namespace = "default",
SparkApplicationConfig = new Gcp.Dataproc.Inputs.GdcSparkApplicationSparkApplicationConfigArgs
{
MainClass = "org.apache.spark.examples.SparkPi",
JarFileUris = new[]
{
"file:///usr/lib/spark/examples/jars/spark-examples.jar",
},
Args = new[]
{
"10000",
},
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.dataproc.GdcSparkApplication;
import com.pulumi.gcp.dataproc.GdcSparkApplicationArgs;
import com.pulumi.gcp.dataproc.inputs.GdcSparkApplicationSparkApplicationConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var spark_application = new GdcSparkApplication("spark-application", GdcSparkApplicationArgs.builder()
.sparkApplicationId("tf-e2e-spark-app-basic")
.serviceinstance("do-not-delete-dataproc-gdc-instance")
.project("my-project")
.location("us-west2")
.namespace("default")
.sparkApplicationConfig(GdcSparkApplicationSparkApplicationConfigArgs.builder()
.mainClass("org.apache.spark.examples.SparkPi")
.jarFileUris("file:///usr/lib/spark/examples/jars/spark-examples.jar")
.args("10000")
.build())
.build());
}
}
resources:
spark-application:
type: gcp:dataproc:GdcSparkApplication
properties:
sparkApplicationId: tf-e2e-spark-app-basic
serviceinstance: do-not-delete-dataproc-gdc-instance
project: my-project
location: us-west2
namespace: default
sparkApplicationConfig:
mainClass: org.apache.spark.examples.SparkPi
jarFileUris:
- file:///usr/lib/spark/examples/jars/spark-examples.jar
args:
- '10000'
Dataprocgdc Sparkapplication
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const appEnv = new gcp.dataproc.GdcApplicationEnvironment("app_env", {
applicationEnvironmentId: "tf-e2e-spark-app-env",
serviceinstance: "do-not-delete-dataproc-gdc-instance",
project: "my-project",
location: "us-west2",
namespace: "default",
});
const spark_application = new gcp.dataproc.GdcSparkApplication("spark-application", {
sparkApplicationId: "tf-e2e-spark-app",
serviceinstance: "do-not-delete-dataproc-gdc-instance",
project: "my-project",
location: "us-west2",
namespace: "default",
labels: {
"test-label": "label-value",
},
annotations: {
an_annotation: "annotation_value",
},
properties: {
"spark.executor.instances": "2",
},
applicationEnvironment: appEnv.name,
version: "1.2",
sparkApplicationConfig: {
mainJarFileUri: "file:///usr/lib/spark/examples/jars/spark-examples.jar",
jarFileUris: ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
archiveUris: ["file://usr/lib/spark/examples/spark-examples.jar"],
fileUris: ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
},
});
import pulumi
import pulumi_gcp as gcp
app_env = gcp.dataproc.GdcApplicationEnvironment("app_env",
application_environment_id="tf-e2e-spark-app-env",
serviceinstance="do-not-delete-dataproc-gdc-instance",
project="my-project",
location="us-west2",
namespace="default")
spark_application = gcp.dataproc.GdcSparkApplication("spark-application",
spark_application_id="tf-e2e-spark-app",
serviceinstance="do-not-delete-dataproc-gdc-instance",
project="my-project",
location="us-west2",
namespace="default",
labels={
"test-label": "label-value",
},
annotations={
"an_annotation": "annotation_value",
},
properties={
"spark.executor.instances": "2",
},
application_environment=app_env.name,
version="1.2",
spark_application_config={
"main_jar_file_uri": "file:///usr/lib/spark/examples/jars/spark-examples.jar",
"jar_file_uris": ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
"archive_uris": ["file://usr/lib/spark/examples/spark-examples.jar"],
"file_uris": ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
})
package main
import (
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/dataproc"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
appEnv, err := dataproc.NewGdcApplicationEnvironment(ctx, "app_env", &dataproc.GdcApplicationEnvironmentArgs{
ApplicationEnvironmentId: pulumi.String("tf-e2e-spark-app-env"),
Serviceinstance: pulumi.String("do-not-delete-dataproc-gdc-instance"),
Project: pulumi.String("my-project"),
Location: pulumi.String("us-west2"),
Namespace: pulumi.String("default"),
})
if err != nil {
return err
}
_, err = dataproc.NewGdcSparkApplication(ctx, "spark-application", &dataproc.GdcSparkApplicationArgs{
SparkApplicationId: pulumi.String("tf-e2e-spark-app"),
Serviceinstance: pulumi.String("do-not-delete-dataproc-gdc-instance"),
Project: pulumi.String("my-project"),
Location: pulumi.String("us-west2"),
Namespace: pulumi.String("default"),
Labels: pulumi.StringMap{
"test-label": pulumi.String("label-value"),
},
Annotations: pulumi.StringMap{
"an_annotation": pulumi.String("annotation_value"),
},
Properties: pulumi.StringMap{
"spark.executor.instances": pulumi.String("2"),
},
ApplicationEnvironment: appEnv.Name,
Version: pulumi.String("1.2"),
SparkApplicationConfig: &dataproc.GdcSparkApplicationSparkApplicationConfigArgs{
MainJarFileUri: pulumi.String("file:///usr/lib/spark/examples/jars/spark-examples.jar"),
JarFileUris: pulumi.StringArray{
pulumi.String("file:///usr/lib/spark/examples/jars/spark-examples.jar"),
},
ArchiveUris: pulumi.StringArray{
pulumi.String("file://usr/lib/spark/examples/spark-examples.jar"),
},
FileUris: pulumi.StringArray{
pulumi.String("file:///usr/lib/spark/examples/jars/spark-examples.jar"),
},
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var appEnv = new Gcp.Dataproc.GdcApplicationEnvironment("app_env", new()
{
ApplicationEnvironmentId = "tf-e2e-spark-app-env",
Serviceinstance = "do-not-delete-dataproc-gdc-instance",
Project = "my-project",
Location = "us-west2",
Namespace = "default",
});
var spark_application = new Gcp.Dataproc.GdcSparkApplication("spark-application", new()
{
SparkApplicationId = "tf-e2e-spark-app",
Serviceinstance = "do-not-delete-dataproc-gdc-instance",
Project = "my-project",
Location = "us-west2",
Namespace = "default",
Labels =
{
{ "test-label", "label-value" },
},
Annotations =
{
{ "an_annotation", "annotation_value" },
},
Properties =
{
{ "spark.executor.instances", "2" },
},
ApplicationEnvironment = appEnv.Name,
Version = "1.2",
SparkApplicationConfig = new Gcp.Dataproc.Inputs.GdcSparkApplicationSparkApplicationConfigArgs
{
MainJarFileUri = "file:///usr/lib/spark/examples/jars/spark-examples.jar",
JarFileUris = new[]
{
"file:///usr/lib/spark/examples/jars/spark-examples.jar",
},
ArchiveUris = new[]
{
"file://usr/lib/spark/examples/spark-examples.jar",
},
FileUris = new[]
{
"file:///usr/lib/spark/examples/jars/spark-examples.jar",
},
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.dataproc.GdcApplicationEnvironment;
import com.pulumi.gcp.dataproc.GdcApplicationEnvironmentArgs;
import com.pulumi.gcp.dataproc.GdcSparkApplication;
import com.pulumi.gcp.dataproc.GdcSparkApplicationArgs;
import com.pulumi.gcp.dataproc.inputs.GdcSparkApplicationSparkApplicationConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var appEnv = new GdcApplicationEnvironment("appEnv", GdcApplicationEnvironmentArgs.builder()
.applicationEnvironmentId("tf-e2e-spark-app-env")
.serviceinstance("do-not-delete-dataproc-gdc-instance")
.project("my-project")
.location("us-west2")
.namespace("default")
.build());
var spark_application = new GdcSparkApplication("spark-application", GdcSparkApplicationArgs.builder()
.sparkApplicationId("tf-e2e-spark-app")
.serviceinstance("do-not-delete-dataproc-gdc-instance")
.project("my-project")
.location("us-west2")
.namespace("default")
.labels(Map.of("test-label", "label-value"))
.annotations(Map.of("an_annotation", "annotation_value"))
.properties(Map.of("spark.executor.instances", "2"))
.applicationEnvironment(appEnv.name())
.version("1.2")
.sparkApplicationConfig(GdcSparkApplicationSparkApplicationConfigArgs.builder()
.mainJarFileUri("file:///usr/lib/spark/examples/jars/spark-examples.jar")
.jarFileUris("file:///usr/lib/spark/examples/jars/spark-examples.jar")
.archiveUris("file://usr/lib/spark/examples/spark-examples.jar")
.fileUris("file:///usr/lib/spark/examples/jars/spark-examples.jar")
.build())
.build());
}
}
resources:
appEnv:
type: gcp:dataproc:GdcApplicationEnvironment
name: app_env
properties:
applicationEnvironmentId: tf-e2e-spark-app-env
serviceinstance: do-not-delete-dataproc-gdc-instance
project: my-project
location: us-west2
namespace: default
spark-application:
type: gcp:dataproc:GdcSparkApplication
properties:
sparkApplicationId: tf-e2e-spark-app
serviceinstance: do-not-delete-dataproc-gdc-instance
project: my-project
location: us-west2
namespace: default
labels:
test-label: label-value
annotations:
an_annotation: annotation_value
properties:
spark.executor.instances: '2'
applicationEnvironment: ${appEnv.name}
version: '1.2'
sparkApplicationConfig:
mainJarFileUri: file:///usr/lib/spark/examples/jars/spark-examples.jar
jarFileUris:
- file:///usr/lib/spark/examples/jars/spark-examples.jar
archiveUris:
- file://usr/lib/spark/examples/spark-examples.jar
fileUris:
- file:///usr/lib/spark/examples/jars/spark-examples.jar
Dataprocgdc Sparkapplication Pyspark
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const spark_application = new gcp.dataproc.GdcSparkApplication("spark-application", {
sparkApplicationId: "tf-e2e-pyspark-app",
serviceinstance: "do-not-delete-dataproc-gdc-instance",
project: "my-project",
location: "us-west2",
namespace: "default",
displayName: "A Pyspark application for a Terraform create test",
dependencyImages: ["gcr.io/some/image"],
pysparkApplicationConfig: {
mainPythonFileUri: "gs://goog-dataproc-initialization-actions-us-west2/conda/test_conda.py",
jarFileUris: ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
pythonFileUris: ["gs://goog-dataproc-initialization-actions-us-west2/conda/get-sys-exec.py"],
fileUris: ["file://usr/lib/spark/examples/spark-examples.jar"],
archiveUris: ["file://usr/lib/spark/examples/spark-examples.jar"],
args: ["10"],
},
});
import pulumi
import pulumi_gcp as gcp
spark_application = gcp.dataproc.GdcSparkApplication("spark-application",
spark_application_id="tf-e2e-pyspark-app",
serviceinstance="do-not-delete-dataproc-gdc-instance",
project="my-project",
location="us-west2",
namespace="default",
display_name="A Pyspark application for a Terraform create test",
dependency_images=["gcr.io/some/image"],
pyspark_application_config={
"main_python_file_uri": "gs://goog-dataproc-initialization-actions-us-west2/conda/test_conda.py",
"jar_file_uris": ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
"python_file_uris": ["gs://goog-dataproc-initialization-actions-us-west2/conda/get-sys-exec.py"],
"file_uris": ["file://usr/lib/spark/examples/spark-examples.jar"],
"archive_uris": ["file://usr/lib/spark/examples/spark-examples.jar"],
"args": ["10"],
})
package main
import (
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/dataproc"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := dataproc.NewGdcSparkApplication(ctx, "spark-application", &dataproc.GdcSparkApplicationArgs{
SparkApplicationId: pulumi.String("tf-e2e-pyspark-app"),
Serviceinstance: pulumi.String("do-not-delete-dataproc-gdc-instance"),
Project: pulumi.String("my-project"),
Location: pulumi.String("us-west2"),
Namespace: pulumi.String("default"),
DisplayName: pulumi.String("A Pyspark application for a Terraform create test"),
DependencyImages: pulumi.StringArray{
pulumi.String("gcr.io/some/image"),
},
PysparkApplicationConfig: &dataproc.GdcSparkApplicationPysparkApplicationConfigArgs{
MainPythonFileUri: pulumi.String("gs://goog-dataproc-initialization-actions-us-west2/conda/test_conda.py"),
JarFileUris: pulumi.StringArray{
pulumi.String("file:///usr/lib/spark/examples/jars/spark-examples.jar"),
},
PythonFileUris: pulumi.StringArray{
pulumi.String("gs://goog-dataproc-initialization-actions-us-west2/conda/get-sys-exec.py"),
},
FileUris: pulumi.StringArray{
pulumi.String("file://usr/lib/spark/examples/spark-examples.jar"),
},
ArchiveUris: pulumi.StringArray{
pulumi.String("file://usr/lib/spark/examples/spark-examples.jar"),
},
Args: pulumi.StringArray{
pulumi.String("10"),
},
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var spark_application = new Gcp.Dataproc.GdcSparkApplication("spark-application", new()
{
SparkApplicationId = "tf-e2e-pyspark-app",
Serviceinstance = "do-not-delete-dataproc-gdc-instance",
Project = "my-project",
Location = "us-west2",
Namespace = "default",
DisplayName = "A Pyspark application for a Terraform create test",
DependencyImages = new[]
{
"gcr.io/some/image",
},
PysparkApplicationConfig = new Gcp.Dataproc.Inputs.GdcSparkApplicationPysparkApplicationConfigArgs
{
MainPythonFileUri = "gs://goog-dataproc-initialization-actions-us-west2/conda/test_conda.py",
JarFileUris = new[]
{
"file:///usr/lib/spark/examples/jars/spark-examples.jar",
},
PythonFileUris = new[]
{
"gs://goog-dataproc-initialization-actions-us-west2/conda/get-sys-exec.py",
},
FileUris = new[]
{
"file://usr/lib/spark/examples/spark-examples.jar",
},
ArchiveUris = new[]
{
"file://usr/lib/spark/examples/spark-examples.jar",
},
Args = new[]
{
"10",
},
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.dataproc.GdcSparkApplication;
import com.pulumi.gcp.dataproc.GdcSparkApplicationArgs;
import com.pulumi.gcp.dataproc.inputs.GdcSparkApplicationPysparkApplicationConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var spark_application = new GdcSparkApplication("spark-application", GdcSparkApplicationArgs.builder()
.sparkApplicationId("tf-e2e-pyspark-app")
.serviceinstance("do-not-delete-dataproc-gdc-instance")
.project("my-project")
.location("us-west2")
.namespace("default")
.displayName("A Pyspark application for a Terraform create test")
.dependencyImages("gcr.io/some/image")
.pysparkApplicationConfig(GdcSparkApplicationPysparkApplicationConfigArgs.builder()
.mainPythonFileUri("gs://goog-dataproc-initialization-actions-us-west2/conda/test_conda.py")
.jarFileUris("file:///usr/lib/spark/examples/jars/spark-examples.jar")
.pythonFileUris("gs://goog-dataproc-initialization-actions-us-west2/conda/get-sys-exec.py")
.fileUris("file://usr/lib/spark/examples/spark-examples.jar")
.archiveUris("file://usr/lib/spark/examples/spark-examples.jar")
.args("10")
.build())
.build());
}
}
resources:
spark-application:
type: gcp:dataproc:GdcSparkApplication
properties:
sparkApplicationId: tf-e2e-pyspark-app
serviceinstance: do-not-delete-dataproc-gdc-instance
project: my-project
location: us-west2
namespace: default
displayName: A Pyspark application for a Terraform create test
dependencyImages:
- gcr.io/some/image
pysparkApplicationConfig:
mainPythonFileUri: gs://goog-dataproc-initialization-actions-us-west2/conda/test_conda.py
jarFileUris:
- file:///usr/lib/spark/examples/jars/spark-examples.jar
pythonFileUris:
- gs://goog-dataproc-initialization-actions-us-west2/conda/get-sys-exec.py
fileUris:
- file://usr/lib/spark/examples/spark-examples.jar
archiveUris:
- file://usr/lib/spark/examples/spark-examples.jar
args:
- '10'
Dataprocgdc Sparkapplication Sparkr
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const spark_application = new gcp.dataproc.GdcSparkApplication("spark-application", {
sparkApplicationId: "tf-e2e-sparkr-app",
serviceinstance: "do-not-delete-dataproc-gdc-instance",
project: "my-project",
location: "us-west2",
namespace: "default",
displayName: "A SparkR application for a Terraform create test",
sparkRApplicationConfig: {
mainRFileUri: "gs://some-bucket/something.R",
fileUris: ["file://usr/lib/spark/examples/spark-examples.jar"],
archiveUris: ["file://usr/lib/spark/examples/spark-examples.jar"],
args: ["10"],
},
});
import pulumi
import pulumi_gcp as gcp
spark_application = gcp.dataproc.GdcSparkApplication("spark-application",
spark_application_id="tf-e2e-sparkr-app",
serviceinstance="do-not-delete-dataproc-gdc-instance",
project="my-project",
location="us-west2",
namespace="default",
display_name="A SparkR application for a Terraform create test",
spark_r_application_config={
"main_r_file_uri": "gs://some-bucket/something.R",
"file_uris": ["file://usr/lib/spark/examples/spark-examples.jar"],
"archive_uris": ["file://usr/lib/spark/examples/spark-examples.jar"],
"args": ["10"],
})
package main
import (
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/dataproc"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := dataproc.NewGdcSparkApplication(ctx, "spark-application", &dataproc.GdcSparkApplicationArgs{
SparkApplicationId: pulumi.String("tf-e2e-sparkr-app"),
Serviceinstance: pulumi.String("do-not-delete-dataproc-gdc-instance"),
Project: pulumi.String("my-project"),
Location: pulumi.String("us-west2"),
Namespace: pulumi.String("default"),
DisplayName: pulumi.String("A SparkR application for a Terraform create test"),
SparkRApplicationConfig: &dataproc.GdcSparkApplicationSparkRApplicationConfigArgs{
MainRFileUri: pulumi.String("gs://some-bucket/something.R"),
FileUris: pulumi.StringArray{
pulumi.String("file://usr/lib/spark/examples/spark-examples.jar"),
},
ArchiveUris: pulumi.StringArray{
pulumi.String("file://usr/lib/spark/examples/spark-examples.jar"),
},
Args: pulumi.StringArray{
pulumi.String("10"),
},
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var spark_application = new Gcp.Dataproc.GdcSparkApplication("spark-application", new()
{
SparkApplicationId = "tf-e2e-sparkr-app",
Serviceinstance = "do-not-delete-dataproc-gdc-instance",
Project = "my-project",
Location = "us-west2",
Namespace = "default",
DisplayName = "A SparkR application for a Terraform create test",
SparkRApplicationConfig = new Gcp.Dataproc.Inputs.GdcSparkApplicationSparkRApplicationConfigArgs
{
MainRFileUri = "gs://some-bucket/something.R",
FileUris = new[]
{
"file://usr/lib/spark/examples/spark-examples.jar",
},
ArchiveUris = new[]
{
"file://usr/lib/spark/examples/spark-examples.jar",
},
Args = new[]
{
"10",
},
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.dataproc.GdcSparkApplication;
import com.pulumi.gcp.dataproc.GdcSparkApplicationArgs;
import com.pulumi.gcp.dataproc.inputs.GdcSparkApplicationSparkRApplicationConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var spark_application = new GdcSparkApplication("spark-application", GdcSparkApplicationArgs.builder()
.sparkApplicationId("tf-e2e-sparkr-app")
.serviceinstance("do-not-delete-dataproc-gdc-instance")
.project("my-project")
.location("us-west2")
.namespace("default")
.displayName("A SparkR application for a Terraform create test")
.sparkRApplicationConfig(GdcSparkApplicationSparkRApplicationConfigArgs.builder()
.mainRFileUri("gs://some-bucket/something.R")
.fileUris("file://usr/lib/spark/examples/spark-examples.jar")
.archiveUris("file://usr/lib/spark/examples/spark-examples.jar")
.args("10")
.build())
.build());
}
}
resources:
spark-application:
type: gcp:dataproc:GdcSparkApplication
properties:
sparkApplicationId: tf-e2e-sparkr-app
serviceinstance: do-not-delete-dataproc-gdc-instance
project: my-project
location: us-west2
namespace: default
displayName: A SparkR application for a Terraform create test
sparkRApplicationConfig:
mainRFileUri: gs://some-bucket/something.R
fileUris:
- file://usr/lib/spark/examples/spark-examples.jar
archiveUris:
- file://usr/lib/spark/examples/spark-examples.jar
args:
- '10'
Dataprocgdc Sparkapplication Sparksql
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const spark_application = new gcp.dataproc.GdcSparkApplication("spark-application", {
sparkApplicationId: "tf-e2e-sparksql-app",
serviceinstance: "do-not-delete-dataproc-gdc-instance",
project: "my-project",
location: "us-west2",
namespace: "default",
displayName: "A SparkSql application for a Terraform create test",
sparkSqlApplicationConfig: {
jarFileUris: ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
queryList: {
queries: ["show tables;"],
},
scriptVariables: {
MY_VAR: "1",
},
},
});
import pulumi
import pulumi_gcp as gcp
spark_application = gcp.dataproc.GdcSparkApplication("spark-application",
spark_application_id="tf-e2e-sparksql-app",
serviceinstance="do-not-delete-dataproc-gdc-instance",
project="my-project",
location="us-west2",
namespace="default",
display_name="A SparkSql application for a Terraform create test",
spark_sql_application_config={
"jar_file_uris": ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
"query_list": {
"queries": ["show tables;"],
},
"script_variables": {
"MY_VAR": "1",
},
})
package main
import (
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/dataproc"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := dataproc.NewGdcSparkApplication(ctx, "spark-application", &dataproc.GdcSparkApplicationArgs{
SparkApplicationId: pulumi.String("tf-e2e-sparksql-app"),
Serviceinstance: pulumi.String("do-not-delete-dataproc-gdc-instance"),
Project: pulumi.String("my-project"),
Location: pulumi.String("us-west2"),
Namespace: pulumi.String("default"),
DisplayName: pulumi.String("A SparkSql application for a Terraform create test"),
SparkSqlApplicationConfig: &dataproc.GdcSparkApplicationSparkSqlApplicationConfigArgs{
JarFileUris: pulumi.StringArray{
pulumi.String("file:///usr/lib/spark/examples/jars/spark-examples.jar"),
},
QueryList: &dataproc.GdcSparkApplicationSparkSqlApplicationConfigQueryListArgs{
Queries: pulumi.StringArray{
pulumi.String("show tables;"),
},
},
ScriptVariables: pulumi.StringMap{
"MY_VAR": pulumi.String("1"),
},
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var spark_application = new Gcp.Dataproc.GdcSparkApplication("spark-application", new()
{
SparkApplicationId = "tf-e2e-sparksql-app",
Serviceinstance = "do-not-delete-dataproc-gdc-instance",
Project = "my-project",
Location = "us-west2",
Namespace = "default",
DisplayName = "A SparkSql application for a Terraform create test",
SparkSqlApplicationConfig = new Gcp.Dataproc.Inputs.GdcSparkApplicationSparkSqlApplicationConfigArgs
{
JarFileUris = new[]
{
"file:///usr/lib/spark/examples/jars/spark-examples.jar",
},
QueryList = new Gcp.Dataproc.Inputs.GdcSparkApplicationSparkSqlApplicationConfigQueryListArgs
{
Queries = new[]
{
"show tables;",
},
},
ScriptVariables =
{
{ "MY_VAR", "1" },
},
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.dataproc.GdcSparkApplication;
import com.pulumi.gcp.dataproc.GdcSparkApplicationArgs;
import com.pulumi.gcp.dataproc.inputs.GdcSparkApplicationSparkSqlApplicationConfigArgs;
import com.pulumi.gcp.dataproc.inputs.GdcSparkApplicationSparkSqlApplicationConfigQueryListArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var spark_application = new GdcSparkApplication("spark-application", GdcSparkApplicationArgs.builder()
.sparkApplicationId("tf-e2e-sparksql-app")
.serviceinstance("do-not-delete-dataproc-gdc-instance")
.project("my-project")
.location("us-west2")
.namespace("default")
.displayName("A SparkSql application for a Terraform create test")
.sparkSqlApplicationConfig(GdcSparkApplicationSparkSqlApplicationConfigArgs.builder()
.jarFileUris("file:///usr/lib/spark/examples/jars/spark-examples.jar")
.queryList(GdcSparkApplicationSparkSqlApplicationConfigQueryListArgs.builder()
.queries("show tables;")
.build())
.scriptVariables(Map.of("MY_VAR", "1"))
.build())
.build());
}
}
resources:
spark-application:
type: gcp:dataproc:GdcSparkApplication
properties:
sparkApplicationId: tf-e2e-sparksql-app
serviceinstance: do-not-delete-dataproc-gdc-instance
project: my-project
location: us-west2
namespace: default
displayName: A SparkSql application for a Terraform create test
sparkSqlApplicationConfig:
jarFileUris:
- file:///usr/lib/spark/examples/jars/spark-examples.jar
queryList:
queries:
- show tables;
scriptVariables:
MY_VAR: '1'
Dataprocgdc Sparkapplication Sparksql Query File
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const spark_application = new gcp.dataproc.GdcSparkApplication("spark-application", {
sparkApplicationId: "tf-e2e-sparksql-app",
serviceinstance: "do-not-delete-dataproc-gdc-instance",
project: "my-project",
location: "us-west2",
namespace: "default",
displayName: "A SparkSql application for a Terraform create test",
sparkSqlApplicationConfig: {
jarFileUris: ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
queryFileUri: "gs://some-bucket/something.sql",
scriptVariables: {
MY_VAR: "1",
},
},
});
import pulumi
import pulumi_gcp as gcp
spark_application = gcp.dataproc.GdcSparkApplication("spark-application",
spark_application_id="tf-e2e-sparksql-app",
serviceinstance="do-not-delete-dataproc-gdc-instance",
project="my-project",
location="us-west2",
namespace="default",
display_name="A SparkSql application for a Terraform create test",
spark_sql_application_config={
"jar_file_uris": ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
"query_file_uri": "gs://some-bucket/something.sql",
"script_variables": {
"MY_VAR": "1",
},
})
package main
import (
"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/dataproc"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := dataproc.NewGdcSparkApplication(ctx, "spark-application", &dataproc.GdcSparkApplicationArgs{
SparkApplicationId: pulumi.String("tf-e2e-sparksql-app"),
Serviceinstance: pulumi.String("do-not-delete-dataproc-gdc-instance"),
Project: pulumi.String("my-project"),
Location: pulumi.String("us-west2"),
Namespace: pulumi.String("default"),
DisplayName: pulumi.String("A SparkSql application for a Terraform create test"),
SparkSqlApplicationConfig: &dataproc.GdcSparkApplicationSparkSqlApplicationConfigArgs{
JarFileUris: pulumi.StringArray{
pulumi.String("file:///usr/lib/spark/examples/jars/spark-examples.jar"),
},
QueryFileUri: pulumi.String("gs://some-bucket/something.sql"),
ScriptVariables: pulumi.StringMap{
"MY_VAR": pulumi.String("1"),
},
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var spark_application = new Gcp.Dataproc.GdcSparkApplication("spark-application", new()
{
SparkApplicationId = "tf-e2e-sparksql-app",
Serviceinstance = "do-not-delete-dataproc-gdc-instance",
Project = "my-project",
Location = "us-west2",
Namespace = "default",
DisplayName = "A SparkSql application for a Terraform create test",
SparkSqlApplicationConfig = new Gcp.Dataproc.Inputs.GdcSparkApplicationSparkSqlApplicationConfigArgs
{
JarFileUris = new[]
{
"file:///usr/lib/spark/examples/jars/spark-examples.jar",
},
QueryFileUri = "gs://some-bucket/something.sql",
ScriptVariables =
{
{ "MY_VAR", "1" },
},
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.dataproc.GdcSparkApplication;
import com.pulumi.gcp.dataproc.GdcSparkApplicationArgs;
import com.pulumi.gcp.dataproc.inputs.GdcSparkApplicationSparkSqlApplicationConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var spark_application = new GdcSparkApplication("spark-application", GdcSparkApplicationArgs.builder()
.sparkApplicationId("tf-e2e-sparksql-app")
.serviceinstance("do-not-delete-dataproc-gdc-instance")
.project("my-project")
.location("us-west2")
.namespace("default")
.displayName("A SparkSql application for a Terraform create test")
.sparkSqlApplicationConfig(GdcSparkApplicationSparkSqlApplicationConfigArgs.builder()
.jarFileUris("file:///usr/lib/spark/examples/jars/spark-examples.jar")
.queryFileUri("gs://some-bucket/something.sql")
.scriptVariables(Map.of("MY_VAR", "1"))
.build())
.build());
}
}
resources:
spark-application:
type: gcp:dataproc:GdcSparkApplication
properties:
sparkApplicationId: tf-e2e-sparksql-app
serviceinstance: do-not-delete-dataproc-gdc-instance
project: my-project
location: us-west2
namespace: default
displayName: A SparkSql application for a Terraform create test
sparkSqlApplicationConfig:
jarFileUris:
- file:///usr/lib/spark/examples/jars/spark-examples.jar
queryFileUri: gs://some-bucket/something.sql
scriptVariables:
MY_VAR: '1'
Create GdcSparkApplication Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new GdcSparkApplication(name: string, args: GdcSparkApplicationArgs, opts?: CustomResourceOptions);
@overload
def GdcSparkApplication(resource_name: str,
args: GdcSparkApplicationArgs,
opts: Optional[ResourceOptions] = None)
@overload
def GdcSparkApplication(resource_name: str,
opts: Optional[ResourceOptions] = None,
location: Optional[str] = None,
spark_application_id: Optional[str] = None,
serviceinstance: Optional[str] = None,
labels: Optional[Mapping[str, str]] = None,
annotations: Optional[Mapping[str, str]] = None,
display_name: Optional[str] = None,
namespace: Optional[str] = None,
project: Optional[str] = None,
properties: Optional[Mapping[str, str]] = None,
pyspark_application_config: Optional[GdcSparkApplicationPysparkApplicationConfigArgs] = None,
dependency_images: Optional[Sequence[str]] = None,
spark_application_config: Optional[GdcSparkApplicationSparkApplicationConfigArgs] = None,
application_environment: Optional[str] = None,
spark_r_application_config: Optional[GdcSparkApplicationSparkRApplicationConfigArgs] = None,
spark_sql_application_config: Optional[GdcSparkApplicationSparkSqlApplicationConfigArgs] = None,
version: Optional[str] = None)
func NewGdcSparkApplication(ctx *Context, name string, args GdcSparkApplicationArgs, opts ...ResourceOption) (*GdcSparkApplication, error)
public GdcSparkApplication(string name, GdcSparkApplicationArgs args, CustomResourceOptions? opts = null)
public GdcSparkApplication(String name, GdcSparkApplicationArgs args)
public GdcSparkApplication(String name, GdcSparkApplicationArgs args, CustomResourceOptions options)
type: gcp:dataproc:GdcSparkApplication
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args GdcSparkApplicationArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args GdcSparkApplicationArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args GdcSparkApplicationArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args GdcSparkApplicationArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args GdcSparkApplicationArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
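For instance, here is a minimal Python sketch (all IDs below are placeholder values) showing the options bag being passed through the constructor's opts parameter:
import pulumi
import pulumi_gcp as gcp

# Minimal sketch: the `opts` argument carries the options bag that controls
# the resource's behavior; all resource IDs below are placeholders.
spark_app = gcp.dataproc.GdcSparkApplication("spark-application",
    spark_application_id="example-app",
    serviceinstance="example-service-instance",
    location="us-west2",
    namespace="default",
    spark_application_config={
        "main_class": "org.apache.spark.examples.SparkPi",
        "jar_file_uris": ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
    },
    opts=pulumi.ResourceOptions(
        protect=True,  # guard the application against accidental deletion
    ))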
Constructor example
The following reference example uses placeholder values for all input properties.
var gdcSparkApplicationResource = new Gcp.Dataproc.GdcSparkApplication("gdcSparkApplicationResource", new()
{
Location = "string",
SparkApplicationId = "string",
Serviceinstance = "string",
Labels =
{
{ "string", "string" },
},
Annotations =
{
{ "string", "string" },
},
DisplayName = "string",
Namespace = "string",
Project = "string",
Properties =
{
{ "string", "string" },
},
PysparkApplicationConfig = new Gcp.Dataproc.Inputs.GdcSparkApplicationPysparkApplicationConfigArgs
{
MainPythonFileUri = "string",
ArchiveUris = new[]
{
"string",
},
Args = new[]
{
"string",
},
FileUris = new[]
{
"string",
},
JarFileUris = new[]
{
"string",
},
PythonFileUris = new[]
{
"string",
},
},
DependencyImages = new[]
{
"string",
},
SparkApplicationConfig = new Gcp.Dataproc.Inputs.GdcSparkApplicationSparkApplicationConfigArgs
{
ArchiveUris = new[]
{
"string",
},
Args = new[]
{
"string",
},
FileUris = new[]
{
"string",
},
JarFileUris = new[]
{
"string",
},
MainClass = "string",
MainJarFileUri = "string",
},
ApplicationEnvironment = "string",
SparkRApplicationConfig = new Gcp.Dataproc.Inputs.GdcSparkApplicationSparkRApplicationConfigArgs
{
MainRFileUri = "string",
ArchiveUris = new[]
{
"string",
},
Args = new[]
{
"string",
},
FileUris = new[]
{
"string",
},
},
SparkSqlApplicationConfig = new Gcp.Dataproc.Inputs.GdcSparkApplicationSparkSqlApplicationConfigArgs
{
JarFileUris = new[]
{
"string",
},
QueryFileUri = "string",
QueryList = new Gcp.Dataproc.Inputs.GdcSparkApplicationSparkSqlApplicationConfigQueryListArgs
{
Queries = new[]
{
"string",
},
},
ScriptVariables =
{
{ "string", "string" },
},
},
Version = "string",
});
example, err := dataproc.NewGdcSparkApplication(ctx, "gdcSparkApplicationResource", &dataproc.GdcSparkApplicationArgs{
Location: pulumi.String("string"),
SparkApplicationId: pulumi.String("string"),
Serviceinstance: pulumi.String("string"),
Labels: pulumi.StringMap{
"string": pulumi.String("string"),
},
Annotations: pulumi.StringMap{
"string": pulumi.String("string"),
},
DisplayName: pulumi.String("string"),
Namespace: pulumi.String("string"),
Project: pulumi.String("string"),
Properties: pulumi.StringMap{
"string": pulumi.String("string"),
},
PysparkApplicationConfig: &dataproc.GdcSparkApplicationPysparkApplicationConfigArgs{
MainPythonFileUri: pulumi.String("string"),
ArchiveUris: pulumi.StringArray{
pulumi.String("string"),
},
Args: pulumi.StringArray{
pulumi.String("string"),
},
FileUris: pulumi.StringArray{
pulumi.String("string"),
},
JarFileUris: pulumi.StringArray{
pulumi.String("string"),
},
PythonFileUris: pulumi.StringArray{
pulumi.String("string"),
},
},
DependencyImages: pulumi.StringArray{
pulumi.String("string"),
},
SparkApplicationConfig: &dataproc.GdcSparkApplicationSparkApplicationConfigArgs{
ArchiveUris: pulumi.StringArray{
pulumi.String("string"),
},
Args: pulumi.StringArray{
pulumi.String("string"),
},
FileUris: pulumi.StringArray{
pulumi.String("string"),
},
JarFileUris: pulumi.StringArray{
pulumi.String("string"),
},
MainClass: pulumi.String("string"),
MainJarFileUri: pulumi.String("string"),
},
ApplicationEnvironment: pulumi.String("string"),
SparkRApplicationConfig: &dataproc.GdcSparkApplicationSparkRApplicationConfigArgs{
MainRFileUri: pulumi.String("string"),
ArchiveUris: pulumi.StringArray{
pulumi.String("string"),
},
Args: pulumi.StringArray{
pulumi.String("string"),
},
FileUris: pulumi.StringArray{
pulumi.String("string"),
},
},
SparkSqlApplicationConfig: &dataproc.GdcSparkApplicationSparkSqlApplicationConfigArgs{
JarFileUris: pulumi.StringArray{
pulumi.String("string"),
},
QueryFileUri: pulumi.String("string"),
QueryList: &dataproc.GdcSparkApplicationSparkSqlApplicationConfigQueryListArgs{
Queries: pulumi.StringArray{
pulumi.String("string"),
},
},
ScriptVariables: pulumi.StringMap{
"string": pulumi.String("string"),
},
},
Version: pulumi.String("string"),
})
var gdcSparkApplicationResource = new GdcSparkApplication("gdcSparkApplicationResource", GdcSparkApplicationArgs.builder()
.location("string")
.sparkApplicationId("string")
.serviceinstance("string")
.labels(Map.of("string", "string"))
.annotations(Map.of("string", "string"))
.displayName("string")
.namespace("string")
.project("string")
.properties(Map.of("string", "string"))
.pysparkApplicationConfig(GdcSparkApplicationPysparkApplicationConfigArgs.builder()
.mainPythonFileUri("string")
.archiveUris("string")
.args("string")
.fileUris("string")
.jarFileUris("string")
.pythonFileUris("string")
.build())
.dependencyImages("string")
.sparkApplicationConfig(GdcSparkApplicationSparkApplicationConfigArgs.builder()
.archiveUris("string")
.args("string")
.fileUris("string")
.jarFileUris("string")
.mainClass("string")
.mainJarFileUri("string")
.build())
.applicationEnvironment("string")
.sparkRApplicationConfig(GdcSparkApplicationSparkRApplicationConfigArgs.builder()
.mainRFileUri("string")
.archiveUris("string")
.args("string")
.fileUris("string")
.build())
.sparkSqlApplicationConfig(GdcSparkApplicationSparkSqlApplicationConfigArgs.builder()
.jarFileUris("string")
.queryFileUri("string")
.queryList(GdcSparkApplicationSparkSqlApplicationConfigQueryListArgs.builder()
.queries("string")
.build())
.scriptVariables(Map.of("string", "string"))
.build())
.version("string")
.build());
gdc_spark_application_resource = gcp.dataproc.GdcSparkApplication("gdcSparkApplicationResource",
location="string",
spark_application_id="string",
serviceinstance="string",
labels={
"string": "string",
},
annotations={
"string": "string",
},
display_name="string",
namespace="string",
project="string",
properties={
"string": "string",
},
pyspark_application_config={
"main_python_file_uri": "string",
"archive_uris": ["string"],
"args": ["string"],
"file_uris": ["string"],
"jar_file_uris": ["string"],
"python_file_uris": ["string"],
},
dependency_images=["string"],
spark_application_config={
"archive_uris": ["string"],
"args": ["string"],
"file_uris": ["string"],
"jar_file_uris": ["string"],
"main_class": "string",
"main_jar_file_uri": "string",
},
application_environment="string",
spark_r_application_config={
"main_r_file_uri": "string",
"archive_uris": ["string"],
"args": ["string"],
"file_uris": ["string"],
},
spark_sql_application_config={
"jar_file_uris": ["string"],
"query_file_uri": "string",
"query_list": {
"queries": ["string"],
},
"script_variables": {
"string": "string",
},
},
version="string")
const gdcSparkApplicationResource = new gcp.dataproc.GdcSparkApplication("gdcSparkApplicationResource", {
location: "string",
sparkApplicationId: "string",
serviceinstance: "string",
labels: {
string: "string",
},
annotations: {
string: "string",
},
displayName: "string",
namespace: "string",
project: "string",
properties: {
string: "string",
},
pysparkApplicationConfig: {
mainPythonFileUri: "string",
archiveUris: ["string"],
args: ["string"],
fileUris: ["string"],
jarFileUris: ["string"],
pythonFileUris: ["string"],
},
dependencyImages: ["string"],
sparkApplicationConfig: {
archiveUris: ["string"],
args: ["string"],
fileUris: ["string"],
jarFileUris: ["string"],
mainClass: "string",
mainJarFileUri: "string",
},
applicationEnvironment: "string",
sparkRApplicationConfig: {
mainRFileUri: "string",
archiveUris: ["string"],
args: ["string"],
fileUris: ["string"],
},
sparkSqlApplicationConfig: {
jarFileUris: ["string"],
queryFileUri: "string",
queryList: {
queries: ["string"],
},
scriptVariables: {
string: "string",
},
},
version: "string",
});
type: gcp:dataproc:GdcSparkApplication
properties:
annotations:
string: string
applicationEnvironment: string
dependencyImages:
- string
displayName: string
labels:
string: string
location: string
namespace: string
project: string
properties:
string: string
pysparkApplicationConfig:
archiveUris:
- string
args:
- string
fileUris:
- string
jarFileUris:
- string
mainPythonFileUri: string
pythonFileUris:
- string
serviceinstance: string
sparkApplicationConfig:
archiveUris:
- string
args:
- string
fileUris:
- string
jarFileUris:
- string
mainClass: string
mainJarFileUri: string
sparkApplicationId: string
sparkRApplicationConfig:
archiveUris:
- string
args:
- string
fileUris:
- string
mainRFileUri: string
sparkSqlApplicationConfig:
jarFileUris:
- string
queryFileUri: string
queryList:
queries:
- string
scriptVariables:
string: string
version: string
GdcSparkApplication Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
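As a small illustration, the same sparkApplicationConfig input can be written in either form (a sketch with placeholder values):
import pulumi_gcp as gcp

# Dictionary-literal form of the nested input:
config_as_dict = {
    "main_class": "org.apache.spark.examples.SparkPi",
    "jar_file_uris": ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
}

# Equivalent argument-class form:
config_as_args = gcp.dataproc.GdcSparkApplicationSparkApplicationConfigArgs(
    main_class="org.apache.spark.examples.SparkPi",
    jar_file_uris=["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
)
Either value can be passed as the spark_application_config argument of the constructor.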
The GdcSparkApplication resource accepts the following input properties:
- Location string
- The location of the spark application.
- Serviceinstance string
- The id of the service instance to which this spark application belongs.
- SparkApplicationId string
- The id of the application.
- Annotations Dictionary&lt;string, string&gt;
- The annotations to associate with this application. Annotations may be used to store client information, but are not used by the server. Note: This field is non-authoritative, and will only manage the annotations present in your configuration. Please refer to the field effective_annotations for all of the annotations present on the resource.
- ApplicationEnvironment string
- An ApplicationEnvironment from which to inherit configuration properties.
- DependencyImages List&lt;string&gt;
- List of container image uris for additional file dependencies. Dependent files are sequentially copied from each image. If a file with the same name exists in 2 images then the file from the later image is used.
- DisplayName string
- User-provided human-readable name to be used in user interfaces.
- Labels Dictionary&lt;string, string&gt;
- The labels to associate with this application. Labels may be used for filtering and billing tracking. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- Namespace string
- The Kubernetes namespace in which to create the application. This namespace must already exist on the cluster.
- Project string
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- Properties Dictionary&lt;string, string&gt;
- Application-specific properties.
- PysparkApplicationConfig GdcSparkApplicationPysparkApplicationConfig
- Represents the PySparkApplicationConfig. Structure is documented below.
- SparkApplicationConfig GdcSparkApplicationSparkApplicationConfig
- Represents the SparkApplicationConfig. Structure is documented below.
- SparkRApplicationConfig GdcSparkApplicationSparkRApplicationConfig
- Represents the SparkRApplicationConfig. Structure is documented below.
- SparkSqlApplicationConfig GdcSparkApplicationSparkSqlApplicationConfig
- Represents the SparkSqlApplicationConfig. Structure is documented below.
- Version string
- The Dataproc version of this application.
- Location string
- The location of the spark application.
- Serviceinstance string
- The id of the service instance to which this spark application belongs.
- SparkApplicationId string
- The id of the application.
- Annotations map[string]string
- The annotations to associate with this application. Annotations may be used to store client information, but are not used by the server. Note: This field is non-authoritative, and will only manage the annotations present in your configuration. Please refer to the field effective_annotations for all of the annotations present on the resource.
- ApplicationEnvironment string
- An ApplicationEnvironment from which to inherit configuration properties.
- DependencyImages []string
- List of container image uris for additional file dependencies. Dependent files are sequentially copied from each image. If a file with the same name exists in 2 images then the file from the later image is used.
- DisplayName string
- User-provided human-readable name to be used in user interfaces.
- Labels map[string]string
- The labels to associate with this application. Labels may be used for filtering and billing tracking. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- Namespace string
- The Kubernetes namespace in which to create the application. This namespace must already exist on the cluster.
- Project string
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- Properties map[string]string
- Application-specific properties.
- PysparkApplicationConfig GdcSparkApplicationPysparkApplicationConfigArgs
- Represents the PySparkApplicationConfig. Structure is documented below.
- SparkApplicationConfig GdcSparkApplicationSparkApplicationConfigArgs
- Represents the SparkApplicationConfig. Structure is documented below.
- SparkRApplicationConfig GdcSparkApplicationSparkRApplicationConfigArgs
- Represents the SparkRApplicationConfig. Structure is documented below.
- SparkSqlApplicationConfig GdcSparkApplicationSparkSqlApplicationConfigArgs
- Represents the SparkSqlApplicationConfig. Structure is documented below.
- Version string
- The Dataproc version of this application.
- location String
- The location of the spark application.
- serviceinstance String
- The id of the service instance to which this spark application belongs.
- sparkApplicationId String
- The id of the application.
- annotations Map&lt;String,String&gt;
- The annotations to associate with this application. Annotations may be used to store client information, but are not used by the server. Note: This field is non-authoritative, and will only manage the annotations present in your configuration. Please refer to the field effective_annotations for all of the annotations present on the resource.
- applicationEnvironment String
- An ApplicationEnvironment from which to inherit configuration properties.
- dependencyImages List&lt;String&gt;
- List of container image uris for additional file dependencies. Dependent files are sequentially copied from each image. If a file with the same name exists in 2 images then the file from the later image is used.
- displayName String
- User-provided human-readable name to be used in user interfaces.
- labels Map&lt;String,String&gt;
- The labels to associate with this application. Labels may be used for filtering and billing tracking. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- namespace String
- The Kubernetes namespace in which to create the application. This namespace must already exist on the cluster.
- project String
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- properties Map&lt;String,String&gt;
- Application-specific properties.
- pysparkApplicationConfig GdcSparkApplicationPysparkApplicationConfig
- Represents the PySparkApplicationConfig. Structure is documented below.
- sparkApplicationConfig GdcSparkApplicationSparkApplicationConfig
- Represents the SparkApplicationConfig. Structure is documented below.
- sparkRApplicationConfig GdcSparkApplicationSparkRApplicationConfig
- Represents the SparkRApplicationConfig. Structure is documented below.
- sparkSqlApplicationConfig GdcSparkApplicationSparkSqlApplicationConfig
- Represents the SparkSqlApplicationConfig. Structure is documented below.
- version String
- The Dataproc version of this application.
- location string
- The location of the spark application.
- serviceinstance string
- The id of the service instance to which this spark application belongs.
- sparkApplicationId string
- The id of the application.
- annotations {[key: string]: string}
- The annotations to associate with this application. Annotations may be used to store client information, but are not used by the server. Note: This field is non-authoritative, and will only manage the annotations present in your configuration. Please refer to the field effective_annotations for all of the annotations present on the resource.
- applicationEnvironment string
- An ApplicationEnvironment from which to inherit configuration properties.
- dependencyImages string[]
- List of container image uris for additional file dependencies. Dependent files are sequentially copied from each image. If a file with the same name exists in 2 images then the file from the later image is used.
- displayName string
- User-provided human-readable name to be used in user interfaces.
- labels {[key: string]: string}
- The labels to associate with this application. Labels may be used for filtering and billing tracking. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- namespace string
- The Kubernetes namespace in which to create the application. This namespace must already exist on the cluster.
- project string
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- properties {[key: string]: string}
- Application-specific properties.
- pysparkApplicationConfig GdcSparkApplicationPysparkApplicationConfig
- Represents the PySparkApplicationConfig. Structure is documented below.
- sparkApplicationConfig GdcSparkApplicationSparkApplicationConfig
- Represents the SparkApplicationConfig. Structure is documented below.
- sparkRApplicationConfig GdcSparkApplicationSparkRApplicationConfig
- Represents the SparkRApplicationConfig. Structure is documented below.
- sparkSqlApplicationConfig GdcSparkApplicationSparkSqlApplicationConfig
- Represents the SparkSqlApplicationConfig. Structure is documented below.
- version string
- The Dataproc version of this application.
- location str
- The location of the spark application.
- serviceinstance str
- The id of the service instance to which this spark application belongs.
- spark_application_id str
- The id of the application.
- annotations Mapping[str, str]
- The annotations to associate with this application. Annotations may be used to store client information, but are not used by the server. Note: This field is non-authoritative, and will only manage the annotations present in your configuration. Please refer to the field effective_annotations for all of the annotations present on the resource.
- application_environment str
- An ApplicationEnvironment from which to inherit configuration properties.
- dependency_images Sequence[str]
- List of container image uris for additional file dependencies. Dependent files are sequentially copied from each image. If a file with the same name exists in 2 images then the file from the later image is used.
- display_name str
- User-provided human-readable name to be used in user interfaces.
- labels Mapping[str, str]
- The labels to associate with this application. Labels may be used for filtering and billing tracking. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- namespace str
- The Kubernetes namespace in which to create the application. This namespace must already exist on the cluster.
- project str
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- properties Mapping[str, str]
- Application-specific properties.
- pyspark_application_config GdcSparkApplicationPysparkApplicationConfigArgs
- Represents the PySparkApplicationConfig. Structure is documented below.
- spark_application_config GdcSparkApplicationSparkApplicationConfigArgs
- Represents the SparkApplicationConfig. Structure is documented below.
- spark_r_application_config GdcSparkApplicationSparkRApplicationConfigArgs
- Represents the SparkRApplicationConfig. Structure is documented below.
- spark_sql_application_config GdcSparkApplicationSparkSqlApplicationConfigArgs
- Represents the SparkSqlApplicationConfig. Structure is documented below.
- version str
- The Dataproc version of this application.
- location String
- The location of the spark application.
- serviceinstance String
- The id of the service instance to which this spark application belongs.
- sparkApplicationId String
- The id of the application.
- annotations Map&lt;String&gt;
- The annotations to associate with this application. Annotations may be used to store client information, but are not used by the server. Note: This field is non-authoritative, and will only manage the annotations present in your configuration. Please refer to the field effective_annotations for all of the annotations present on the resource.
- applicationEnvironment String
- An ApplicationEnvironment from which to inherit configuration properties.
- dependencyImages List&lt;String&gt;
- List of container image uris for additional file dependencies. Dependent files are sequentially copied from each image. If a file with the same name exists in 2 images then the file from the later image is used.
- displayName String
- User-provided human-readable name to be used in user interfaces.
- labels Map&lt;String&gt;
- The labels to associate with this application. Labels may be used for filtering and billing tracking. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- namespace String
- The Kubernetes namespace in which to create the application. This namespace must already exist on the cluster.
- project String
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- properties Map&lt;String&gt;
- Application-specific properties.
- pysparkApplicationConfig Property Map
- Represents the PySparkApplicationConfig. Structure is documented below.
- sparkApplicationConfig Property Map
- Represents the SparkApplicationConfig. Structure is documented below.
- sparkRApplicationConfig Property Map
- Represents the SparkRApplicationConfig. Structure is documented below.
- sparkSqlApplicationConfig Property Map
- Represents the SparkSqlApplicationConfig. Structure is documented below.
- version String
- The Dataproc version of this application.
Outputs
All input properties are implicitly available as output properties. Additionally, the GdcSparkApplication resource produces the following output properties:
- CreateTime string
- The timestamp when the resource was created.
- EffectiveAnnotations Dictionary<string, string>
- All of annotations (key/value pairs) present on the resource in GCP, including the annotations configured through Pulumi, other clients and services.
- EffectiveLabels Dictionary<string, string>
- All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- Id string
- The provider-assigned unique ID for this managed resource.
- MonitoringEndpoint string
- URL for a monitoring UI for this application (for eventual Spark PHS/UI support). Out of scope for private GA.
- Name string
- Identifier. The name of the application. Format: projects/{project}/locations/{location}/serviceInstances/{service_instance}/sparkApplications/{application}
- OutputUri string
- An HCFS URI pointing to the location of stdout and stderr of the application. Mainly useful for Pantheon and gcloud. Not in scope for private GA.
- PulumiLabels Dictionary<string, string>
- The combination of labels configured directly on the resource and default labels configured on the provider.
- Reconciling bool
- Whether the application is currently reconciling. True if the current state of the resource does not match the intended state, and the system is working to reconcile them, whether or not the change was user initiated.
- State string
- The current state.
Possible values:
STATE_UNSPECIFIED
PENDING
RUNNING
CANCELLING
CANCELLED
SUCCEEDED
FAILED
- StateMessage string
- A message explaining the current state.
- Uid string
- System generated unique identifier for this application, formatted as UUID4.
- UpdateTime string
- The timestamp when the resource was most recently updated.
- CreateTime string
- The timestamp when the resource was created.
- EffectiveAnnotations map[string]string
- All of annotations (key/value pairs) present on the resource in GCP, including the annotations configured through Pulumi, other clients and services.
- EffectiveLabels map[string]string
- All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- Id string
- The provider-assigned unique ID for this managed resource.
- MonitoringEndpoint string
- URL for a monitoring UI for this application (for eventual Spark PHS/UI support). Out of scope for private GA.
- Name string
- Identifier. The name of the application. Format: projects/{project}/locations/{location}/serviceInstances/{service_instance}/sparkApplications/{application}
- OutputUri string
- An HCFS URI pointing to the location of stdout and stderr of the application. Mainly useful for Pantheon and gcloud. Not in scope for private GA.
- PulumiLabels map[string]string
- The combination of labels configured directly on the resource and default labels configured on the provider.
- Reconciling bool
- Whether the application is currently reconciling. True if the current state of the resource does not match the intended state, and the system is working to reconcile them, whether or not the change was user initiated.
- State string
- The current state.
Possible values:
STATE_UNSPECIFIED
PENDING
RUNNING
CANCELLING
CANCELLED
SUCCEEDED
FAILED
- StateMessage string
- A message explaining the current state.
- Uid string
- System generated unique identifier for this application, formatted as UUID4.
- UpdateTime string
- The timestamp when the resource was most recently updated.
- createTime String
- The timestamp when the resource was created.
- effectiveAnnotations Map<String,String>
- All of annotations (key/value pairs) present on the resource in GCP, including the annotations configured through Pulumi, other clients and services.
- effectiveLabels Map<String,String>
- All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- id String
- The provider-assigned unique ID for this managed resource.
- monitoringEndpoint String
- URL for a monitoring UI for this application (for eventual Spark PHS/UI support). Out of scope for private GA.
- name String
- Identifier. The name of the application. Format: projects/{project}/locations/{location}/serviceInstances/{service_instance}/sparkApplications/{application}
- outputUri String
- An HCFS URI pointing to the location of stdout and stderr of the application. Mainly useful for Pantheon and gcloud. Not in scope for private GA.
- pulumiLabels Map<String,String>
- The combination of labels configured directly on the resource and default labels configured on the provider.
- reconciling Boolean
- Whether the application is currently reconciling. True if the current state of the resource does not match the intended state, and the system is working to reconcile them, whether or not the change was user initiated.
- state String
- The current state.
Possible values:
STATE_UNSPECIFIED
PENDING
RUNNING
CANCELLING
CANCELLED
SUCCEEDED
FAILED
- stateMessage String
- A message explaining the current state.
- uid String
- System generated unique identifier for this application, formatted as UUID4.
- updateTime String
- The timestamp when the resource was most recently updated.
- createTime string
- The timestamp when the resource was created.
- effectiveAnnotations {[key: string]: string}
- All of annotations (key/value pairs) present on the resource in GCP, including the annotations configured through Pulumi, other clients and services.
- effectiveLabels {[key: string]: string}
- All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- id string
- The provider-assigned unique ID for this managed resource.
- monitoringEndpoint string
- URL for a monitoring UI for this application (for eventual Spark PHS/UI support). Out of scope for private GA.
- name string
- Identifier. The name of the application. Format: projects/{project}/locations/{location}/serviceInstances/{service_instance}/sparkApplications/{application}
- outputUri string
- An HCFS URI pointing to the location of stdout and stderr of the application. Mainly useful for Pantheon and gcloud. Not in scope for private GA.
- pulumiLabels {[key: string]: string}
- The combination of labels configured directly on the resource and default labels configured on the provider.
- reconciling boolean
- Whether the application is currently reconciling. True if the current state of the resource does not match the intended state, and the system is working to reconcile them, whether or not the change was user initiated.
- state string
- The current state.
Possible values:
STATE_UNSPECIFIED
PENDING
RUNNING
CANCELLING
CANCELLED
SUCCEEDED
FAILED
- stateMessage string
- A message explaining the current state.
- uid string
- System generated unique identifier for this application, formatted as UUID4.
- updateTime string
- The timestamp when the resource was most recently updated.
- create_time str
- The timestamp when the resource was created.
- effective_annotations Mapping[str, str]
- All of annotations (key/value pairs) present on the resource in GCP, including the annotations configured through Pulumi, other clients and services.
- effective_labels Mapping[str, str]
- All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- id str
- The provider-assigned unique ID for this managed resource.
- monitoring_endpoint str
- URL for a monitoring UI for this application (for eventual Spark PHS/UI support). Out of scope for private GA.
- name str
- Identifier. The name of the application. Format: projects/{project}/locations/{location}/serviceInstances/{service_instance}/sparkApplications/{application}
- output_uri str
- An HCFS URI pointing to the location of stdout and stderr of the application. Mainly useful for Pantheon and gcloud. Not in scope for private GA.
- pulumi_labels Mapping[str, str]
- The combination of labels configured directly on the resource and default labels configured on the provider.
- reconciling bool
- Whether the application is currently reconciling. True if the current state of the resource does not match the intended state, and the system is working to reconcile them, whether or not the change was user initiated.
- state str
- The current state.
Possible values:
STATE_UNSPECIFIED
PENDING
RUNNING
CANCELLING
CANCELLED
SUCCEEDED
FAILED
- state_message str
- A message explaining the current state.
- uid str
- System generated unique identifier for this application, formatted as UUID4.
- update_time str
- The timestamp when the resource was most recently updated.
- createTime String
- The timestamp when the resource was created.
- effectiveAnnotations Map<String>
- All of annotations (key/value pairs) present on the resource in GCP, including the annotations configured through Pulumi, other clients and services.
- effectiveLabels Map<String>
- All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- id String
- The provider-assigned unique ID for this managed resource.
- monitoringEndpoint String
- URL for a monitoring UI for this application (for eventual Spark PHS/UI support). Out of scope for private GA.
- name String
- Identifier. The name of the application. Format: projects/{project}/locations/{location}/serviceInstances/{service_instance}/sparkApplications/{application}
- outputUri String
- An HCFS URI pointing to the location of stdout and stderr of the application. Mainly useful for Pantheon and gcloud. Not in scope for private GA.
- pulumiLabels Map<String>
- The combination of labels configured directly on the resource and default labels configured on the provider.
- reconciling Boolean
- Whether the application is currently reconciling. True if the current state of the resource does not match the intended state, and the system is working to reconcile them, whether or not the change was user initiated.
- state String
- The current state.
Possible values:
STATE_UNSPECIFIED
PENDING
RUNNING
CANCELLING
CANCELLED
SUCCEEDED
FAILED
- stateMessage String
- A message explaining the current state.
- uid String
- System generated unique identifier for this application, formatted as UUID4.
- updateTime String
- The timestamp when the resource was most recently updated.
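For example, a minimal TypeScript sketch that creates an application and exports a few of these computed properties. All project, instance, and jar names below are hypothetical placeholders:

import * as gcp from "@pulumi/gcp";

// Hypothetical values; substitute your own project, service instance, and jar.
const app = new gcp.dataproc.GdcSparkApplication("example-app", {
    sparkApplicationId: "example-spark-app",
    serviceinstance: "my-gdc-service-instance",
    project: "my-project",
    location: "us-west2",
    namespace: "default",
    sparkApplicationConfig: {
        mainJarFileUri: "file:///opt/jobs/my-job.jar",
    },
});

// Computed output properties resolve once the resource is created.
export const appState = app.state;          // e.g. PENDING, RUNNING, SUCCEEDED
export const appUid = app.uid;              // system-generated UUID4
export const createTime = app.createTime;
export const isTerminal = app.state.apply(s =>
    s === "SUCCEEDED" || s === "FAILED" || s === "CANCELLED");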
Look up Existing GdcSparkApplication Resource
Get an existing GdcSparkApplication resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: GdcSparkApplicationState, opts?: CustomResourceOptions): GdcSparkApplication
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
annotations: Optional[Mapping[str, str]] = None,
application_environment: Optional[str] = None,
create_time: Optional[str] = None,
dependency_images: Optional[Sequence[str]] = None,
display_name: Optional[str] = None,
effective_annotations: Optional[Mapping[str, str]] = None,
effective_labels: Optional[Mapping[str, str]] = None,
labels: Optional[Mapping[str, str]] = None,
location: Optional[str] = None,
monitoring_endpoint: Optional[str] = None,
name: Optional[str] = None,
namespace: Optional[str] = None,
output_uri: Optional[str] = None,
project: Optional[str] = None,
properties: Optional[Mapping[str, str]] = None,
pulumi_labels: Optional[Mapping[str, str]] = None,
pyspark_application_config: Optional[GdcSparkApplicationPysparkApplicationConfigArgs] = None,
reconciling: Optional[bool] = None,
serviceinstance: Optional[str] = None,
spark_application_config: Optional[GdcSparkApplicationSparkApplicationConfigArgs] = None,
spark_application_id: Optional[str] = None,
spark_r_application_config: Optional[GdcSparkApplicationSparkRApplicationConfigArgs] = None,
spark_sql_application_config: Optional[GdcSparkApplicationSparkSqlApplicationConfigArgs] = None,
state: Optional[str] = None,
state_message: Optional[str] = None,
uid: Optional[str] = None,
update_time: Optional[str] = None,
version: Optional[str] = None) -> GdcSparkApplication
func GetGdcSparkApplication(ctx *Context, name string, id IDInput, state *GdcSparkApplicationState, opts ...ResourceOption) (*GdcSparkApplication, error)
public static GdcSparkApplication Get(string name, Input<string> id, GdcSparkApplicationState? state, CustomResourceOptions? opts = null)
public static GdcSparkApplication get(String name, Output<String> id, GdcSparkApplicationState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- Annotations Dictionary<string, string>
- The annotations to associate with this application. Annotations may be used to store client information, but are not used by the server.
Note: This field is non-authoritative, and will only manage the annotations present in your configuration. Please refer to the field effective_annotations for all of the annotations present on the resource.
- ApplicationEnvironment string
- An ApplicationEnvironment from which to inherit configuration properties.
- CreateTime string
- The timestamp when the resource was created.
- DependencyImages List<string>
- List of container image uris for additional file dependencies. Dependent files are sequentially copied from each image. If a file with the same name exists in two images, the file from the later image is used.
- DisplayName string
- User-provided human-readable name to be used in user interfaces.
- EffectiveAnnotations Dictionary<string, string>
- All of annotations (key/value pairs) present on the resource in GCP, including the annotations configured through Pulumi, other clients and services.
- EffectiveLabels Dictionary<string, string>
- All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- Labels Dictionary<string, string>
- The labels to associate with this application. Labels may be used for filtering and billing tracking.
Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- Location string
- The location of the spark application.
- MonitoringEndpoint string
- URL for a monitoring UI for this application (for eventual Spark PHS/UI support). Out of scope for private GA.
- Name string
- Identifier. The name of the application. Format: projects/{project}/locations/{location}/serviceInstances/{service_instance}/sparkApplications/{application}
- Namespace string
- The Kubernetes namespace in which to create the application. This namespace must already exist on the cluster.
- OutputUri string
- An HCFS URI pointing to the location of stdout and stderr of the application. Mainly useful for Pantheon and gcloud. Not in scope for private GA.
- Project string
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- Properties Dictionary<string, string>
- application-specific properties.
- PulumiLabels Dictionary<string, string>
- The combination of labels configured directly on the resource and default labels configured on the provider.
- PysparkApplicationConfig GdcSparkApplicationPysparkApplicationConfig
- Represents the PySparkApplicationConfig. Structure is documented below.
- Reconciling bool
- Whether the application is currently reconciling. True if the current state of the resource does not match the intended state, and the system is working to reconcile them, whether or not the change was user initiated.
- Serviceinstance string
- The id of the service instance to which this spark application belongs.
- SparkApplicationConfig GdcSparkApplicationSparkApplicationConfig
- Represents the SparkApplicationConfig. Structure is documented below.
- SparkApplicationId string
- The id of the application.
- SparkRApplicationConfig GdcSparkApplicationSparkRApplicationConfig
- Represents the SparkRApplicationConfig. Structure is documented below.
- SparkSqlApplicationConfig GdcSparkApplicationSparkSqlApplicationConfig
- Represents the SparkSqlApplicationConfig. Structure is documented below.
- State string
- The current state.
Possible values:
STATE_UNSPECIFIED
PENDING
RUNNING
CANCELLING
CANCELLED
SUCCEEDED
FAILED
- StateMessage string
- A message explaining the current state.
- Uid string
- System generated unique identifier for this application, formatted as UUID4.
- UpdateTime string
- The timestamp when the resource was most recently updated.
- Version string
- The Dataproc version of this application.
- Annotations map[string]string
- The annotations to associate with this application. Annotations may be used to store client information, but are not used by the server.
Note: This field is non-authoritative, and will only manage the annotations present in your configuration. Please refer to the field effective_annotations for all of the annotations present on the resource.
- ApplicationEnvironment string
- An ApplicationEnvironment from which to inherit configuration properties.
- CreateTime string
- The timestamp when the resource was created.
- DependencyImages []string
- List of container image uris for additional file dependencies. Dependent files are sequentially copied from each image. If a file with the same name exists in two images, the file from the later image is used.
- DisplayName string
- User-provided human-readable name to be used in user interfaces.
- EffectiveAnnotations map[string]string
- All of annotations (key/value pairs) present on the resource in GCP, including the annotations configured through Pulumi, other clients and services.
- EffectiveLabels map[string]string
- All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- Labels map[string]string
- The labels to associate with this application. Labels may be used for filtering and billing tracking.
Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- Location string
- The location of the spark application.
- MonitoringEndpoint string
- URL for a monitoring UI for this application (for eventual Spark PHS/UI support). Out of scope for private GA.
- Name string
- Identifier. The name of the application. Format: projects/{project}/locations/{location}/serviceInstances/{service_instance}/sparkApplications/{application}
- Namespace string
- The Kubernetes namespace in which to create the application. This namespace must already exist on the cluster.
- OutputUri string
- An HCFS URI pointing to the location of stdout and stderr of the application. Mainly useful for Pantheon and gcloud. Not in scope for private GA.
- Project string
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- Properties map[string]string
- application-specific properties.
- PulumiLabels map[string]string
- The combination of labels configured directly on the resource and default labels configured on the provider.
- PysparkApplicationConfig GdcSparkApplicationPysparkApplicationConfigArgs
- Represents the PySparkApplicationConfig. Structure is documented below.
- Reconciling bool
- Whether the application is currently reconciling. True if the current state of the resource does not match the intended state, and the system is working to reconcile them, whether or not the change was user initiated.
- Serviceinstance string
- The id of the service instance to which this spark application belongs.
- SparkApplicationConfig GdcSparkApplicationSparkApplicationConfigArgs
- Represents the SparkApplicationConfig. Structure is documented below.
- SparkApplicationId string
- The id of the application.
- SparkRApplicationConfig GdcSparkApplicationSparkRApplicationConfigArgs
- Represents the SparkRApplicationConfig. Structure is documented below.
- SparkSqlApplicationConfig GdcSparkApplicationSparkSqlApplicationConfigArgs
- Represents the SparkSqlApplicationConfig. Structure is documented below.
- State string
- The current state.
Possible values:
STATE_UNSPECIFIED
PENDING
RUNNING
CANCELLING
CANCELLED
SUCCEEDED
FAILED
- StateMessage string
- A message explaining the current state.
- Uid string
- System generated unique identifier for this application, formatted as UUID4.
- UpdateTime string
- The timestamp when the resource was most recently updated.
- Version string
- The Dataproc version of this application.
- annotations Map<String,String>
- The annotations to associate with this application. Annotations may be used to store client information, but are not used by the server.
Note: This field is non-authoritative, and will only manage the annotations present in your configuration. Please refer to the field effective_annotations for all of the annotations present on the resource.
- applicationEnvironment String
- An ApplicationEnvironment from which to inherit configuration properties.
- createTime String
- The timestamp when the resource was created.
- dependencyImages List<String>
- List of container image uris for additional file dependencies. Dependent files are sequentially copied from each image. If a file with the same name exists in two images, the file from the later image is used.
- displayName String
- User-provided human-readable name to be used in user interfaces.
- effectiveAnnotations Map<String,String>
- All of annotations (key/value pairs) present on the resource in GCP, including the annotations configured through Pulumi, other clients and services.
- effectiveLabels Map<String,String>
- All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- labels Map<String,String>
- The labels to associate with this application. Labels may be used for filtering and billing tracking.
Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- location String
- The location of the spark application.
- monitoringEndpoint String
- URL for a monitoring UI for this application (for eventual Spark PHS/UI support). Out of scope for private GA.
- name String
- Identifier. The name of the application. Format: projects/{project}/locations/{location}/serviceInstances/{service_instance}/sparkApplications/{application}
- namespace String
- The Kubernetes namespace in which to create the application. This namespace must already exist on the cluster.
- outputUri String
- An HCFS URI pointing to the location of stdout and stderr of the application. Mainly useful for Pantheon and gcloud. Not in scope for private GA.
- project String
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- properties Map<String,String>
- application-specific properties.
- pulumiLabels Map<String,String>
- The combination of labels configured directly on the resource and default labels configured on the provider.
- pysparkApplicationConfig GdcSparkApplicationPysparkApplicationConfig
- Represents the PySparkApplicationConfig. Structure is documented below.
- reconciling Boolean
- Whether the application is currently reconciling. True if the current state of the resource does not match the intended state, and the system is working to reconcile them, whether or not the change was user initiated.
- serviceinstance String
- The id of the service instance to which this spark application belongs.
- sparkApplicationConfig GdcSparkApplicationSparkApplicationConfig
- Represents the SparkApplicationConfig. Structure is documented below.
- sparkApplicationId String
- The id of the application.
- sparkRApplicationConfig GdcSparkApplicationSparkRApplicationConfig
- Represents the SparkRApplicationConfig. Structure is documented below.
- sparkSqlApplicationConfig GdcSparkApplicationSparkSqlApplicationConfig
- Represents the SparkSqlApplicationConfig. Structure is documented below.
- state String
- The current state.
Possible values:
STATE_UNSPECIFIED
PENDING
RUNNING
CANCELLING
CANCELLED
SUCCEEDED
FAILED
- stateMessage String
- A message explaining the current state.
- uid String
- System generated unique identifier for this application, formatted as UUID4.
- updateTime String
- The timestamp when the resource was most recently updated.
- version String
- The Dataproc version of this application.
- annotations {[key: string]: string}
- The annotations to associate with this application. Annotations may be used to store client information, but are not used by the server.
Note: This field is non-authoritative, and will only manage the annotations present in your configuration. Please refer to the field effective_annotations for all of the annotations present on the resource.
- applicationEnvironment string
- An ApplicationEnvironment from which to inherit configuration properties.
- createTime string
- The timestamp when the resource was created.
- dependencyImages string[]
- List of container image uris for additional file dependencies. Dependent files are sequentially copied from each image. If a file with the same name exists in two images, the file from the later image is used.
- displayName string
- User-provided human-readable name to be used in user interfaces.
- effectiveAnnotations {[key: string]: string}
- All of annotations (key/value pairs) present on the resource in GCP, including the annotations configured through Pulumi, other clients and services.
- effectiveLabels {[key: string]: string}
- All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- labels {[key: string]: string}
- The labels to associate with this application. Labels may be used for filtering and billing tracking.
Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- location string
- The location of the spark application.
- monitoringEndpoint string
- URL for a monitoring UI for this application (for eventual Spark PHS/UI support). Out of scope for private GA.
- name string
- Identifier. The name of the application. Format: projects/{project}/locations/{location}/serviceInstances/{service_instance}/sparkApplications/{application}
- namespace string
- The Kubernetes namespace in which to create the application. This namespace must already exist on the cluster.
- outputUri string
- An HCFS URI pointing to the location of stdout and stderr of the application. Mainly useful for Pantheon and gcloud. Not in scope for private GA.
- project string
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- properties {[key: string]: string}
- application-specific properties.
- pulumiLabels {[key: string]: string}
- The combination of labels configured directly on the resource and default labels configured on the provider.
- pysparkApplicationConfig GdcSparkApplicationPysparkApplicationConfig
- Represents the PySparkApplicationConfig. Structure is documented below.
- reconciling boolean
- Whether the application is currently reconciling. True if the current state of the resource does not match the intended state, and the system is working to reconcile them, whether or not the change was user initiated.
- serviceinstance string
- The id of the service instance to which this spark application belongs.
- sparkApplicationConfig GdcSparkApplicationSparkApplicationConfig
- Represents the SparkApplicationConfig. Structure is documented below.
- sparkApplicationId string
- The id of the application.
- sparkRApplicationConfig GdcSparkApplicationSparkRApplicationConfig
- Represents the SparkRApplicationConfig. Structure is documented below.
- sparkSqlApplicationConfig GdcSparkApplicationSparkSqlApplicationConfig
- Represents the SparkSqlApplicationConfig. Structure is documented below.
- state string
- The current state.
Possible values:
STATE_UNSPECIFIED
PENDING
RUNNING
CANCELLING
CANCELLED
SUCCEEDED
FAILED
- stateMessage string
- A message explaining the current state.
- uid string
- System generated unique identifier for this application, formatted as UUID4.
- updateTime string
- The timestamp when the resource was most recently updated.
- version string
- The Dataproc version of this application.
- annotations Mapping[str, str]
- The annotations to associate with this application. Annotations may be used to store client information, but are not used by the server.
Note: This field is non-authoritative, and will only manage the annotations present in your configuration. Please refer to the field effective_annotations for all of the annotations present on the resource.
- application_environment str
- An ApplicationEnvironment from which to inherit configuration properties.
- create_time str
- The timestamp when the resource was created.
- dependency_images Sequence[str]
- List of container image uris for additional file dependencies. Dependent files are sequentially copied from each image. If a file with the same name exists in two images, the file from the later image is used.
- display_name str
- User-provided human-readable name to be used in user interfaces.
- effective_annotations Mapping[str, str]
- All of annotations (key/value pairs) present on the resource in GCP, including the annotations configured through Pulumi, other clients and services.
- effective_labels Mapping[str, str]
- All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- labels Mapping[str, str]
- The labels to associate with this application. Labels may be used for filtering and billing tracking.
Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- location str
- The location of the spark application.
- monitoring_endpoint str
- URL for a monitoring UI for this application (for eventual Spark PHS/UI support). Out of scope for private GA.
- name str
- Identifier. The name of the application. Format: projects/{project}/locations/{location}/serviceInstances/{service_instance}/sparkApplications/{application}
- namespace str
- The Kubernetes namespace in which to create the application. This namespace must already exist on the cluster.
- output_uri str
- An HCFS URI pointing to the location of stdout and stderr of the application. Mainly useful for Pantheon and gcloud. Not in scope for private GA.
- project str
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- properties Mapping[str, str]
- application-specific properties.
- pulumi_labels Mapping[str, str]
- The combination of labels configured directly on the resource and default labels configured on the provider.
- pyspark_application_config GdcSparkApplicationPysparkApplicationConfigArgs
- Represents the PySparkApplicationConfig. Structure is documented below.
- reconciling bool
- Whether the application is currently reconciling. True if the current state of the resource does not match the intended state, and the system is working to reconcile them, whether or not the change was user initiated.
- serviceinstance str
- The id of the service instance to which this spark application belongs.
- spark_application_config GdcSparkApplicationSparkApplicationConfigArgs
- Represents the SparkApplicationConfig. Structure is documented below.
- spark_application_id str
- The id of the application.
- spark_r_application_config GdcSparkApplicationSparkRApplicationConfigArgs
- Represents the SparkRApplicationConfig. Structure is documented below.
- spark_sql_application_config GdcSparkApplicationSparkSqlApplicationConfigArgs
- Represents the SparkSqlApplicationConfig. Structure is documented below.
- state str
- The current state.
Possible values:
STATE_UNSPECIFIED
PENDING
RUNNING
CANCELLING
CANCELLED
SUCCEEDED
FAILED
- state_message str
- A message explaining the current state.
- uid str
- System generated unique identifier for this application, formatted as UUID4.
- update_time str
- The timestamp when the resource was most recently updated.
- version str
- The Dataproc version of this application.
- annotations Map<String>
- The annotations to associate with this application. Annotations may be used to store client information, but are not used by the server.
Note: This field is non-authoritative, and will only manage the annotations present in your configuration. Please refer to the field effective_annotations for all of the annotations present on the resource.
- applicationEnvironment String
- An ApplicationEnvironment from which to inherit configuration properties.
- createTime String
- The timestamp when the resource was created.
- dependencyImages List<String>
- List of container image uris for additional file dependencies. Dependent files are sequentially copied from each image. If a file with the same name exists in two images, the file from the later image is used.
- displayName String
- User-provided human-readable name to be used in user interfaces.
- effectiveAnnotations Map<String>
- All of annotations (key/value pairs) present on the resource in GCP, including the annotations configured through Pulumi, other clients and services.
- effectiveLabels Map<String>
- All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- labels Map<String>
- The labels to associate with this application. Labels may be used for filtering and billing tracking.
Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- location String
- The location of the spark application.
- monitoringEndpoint String
- URL for a monitoring UI for this application (for eventual Spark PHS/UI support). Out of scope for private GA.
- name String
- Identifier. The name of the application. Format: projects/{project}/locations/{location}/serviceInstances/{service_instance}/sparkApplications/{application}
- namespace String
- The Kubernetes namespace in which to create the application. This namespace must already exist on the cluster.
- outputUri String
- An HCFS URI pointing to the location of stdout and stderr of the application. Mainly useful for Pantheon and gcloud. Not in scope for private GA.
- project String
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- properties Map<String>
- application-specific properties.
- pulumiLabels Map<String>
- The combination of labels configured directly on the resource and default labels configured on the provider.
- pysparkApplicationConfig Property Map
- Represents the PySparkApplicationConfig. Structure is documented below.
- reconciling Boolean
- Whether the application is currently reconciling. True if the current state of the resource does not match the intended state, and the system is working to reconcile them, whether or not the change was user initiated.
- serviceinstance String
- The id of the service instance to which this spark application belongs.
- sparkApplicationConfig Property Map
- Represents the SparkApplicationConfig. Structure is documented below.
- sparkApplicationId String
- The id of the application.
- sparkRApplicationConfig Property Map
- Represents the SparkRApplicationConfig. Structure is documented below.
- sparkSqlApplicationConfig Property Map
- Represents the SparkSqlApplicationConfig. Structure is documented below.
- state String
- The current state.
Possible values:
STATE_UNSPECIFIED
PENDING
RUNNING
CANCELLING
CANCELLED
SUCCEEDED
FAILED
- stateMessage String
- A message explaining the current state.
- uid String
- System generated unique identifier for this application, formatted as UUID4.
- updateTime String
- The timestamp when the resource was most recently updated.
- version String
- The Dataproc version of this application.
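For example, a minimal TypeScript sketch of such a lookup; the ID is the full resource name shown in the name output above, and all values here are hypothetical:

import * as gcp from "@pulumi/gcp";

// Adopt state from an application that already exists (e.g. created outside this stack).
const existing = gcp.dataproc.GdcSparkApplication.get(
    "existing-app",
    "projects/my-project/locations/us-west2/serviceInstances/my-gdc-service-instance/sparkApplications/example-spark-app",
);

export const existingState = existing.state;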
Supporting Types
GdcSparkApplicationPysparkApplicationConfig, GdcSparkApplicationPysparkApplicationConfigArgs
- MainPythonFileUri string
- The HCFS URI of the main Python file to use as the driver. Must be a .py file.
- ArchiveUris List<string>
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- Args List<string>
- The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
- FileUris List<string>
- HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
- JarFileUris List<string>
- HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
- PythonFileUris List<string>
- HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
- MainPythonFileUri string
- The HCFS URI of the main Python file to use as the driver. Must be a .py file.
- ArchiveUris []string
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- Args []string
- The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
- FileUris []string
- HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
- JarFileUris []string
- HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
- PythonFileUris []string
- HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
- mainPythonFileUri String
- The HCFS URI of the main Python file to use as the driver. Must be a .py file.
- archiveUris List<String>
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- args List<String>
- The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
- fileUris List<String>
- HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
- jarFileUris List<String>
- HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
- pythonFileUris List<String>
- HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
- mainPythonFileUri string
- The HCFS URI of the main Python file to use as the driver. Must be a .py file.
- archiveUris string[]
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- args string[]
- The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
- fileUris string[]
- HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
- jarFileUris string[]
- HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
- pythonFileUris string[]
- HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
- main_python_file_uri str
- The HCFS URI of the main Python file to use as the driver. Must be a .py file.
- archive_uris Sequence[str]
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- args Sequence[str]
- The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
- file_uris Sequence[str]
- HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
- jar_file_uris Sequence[str]
- HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
- python_file_uris Sequence[str]
- HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
- mainPythonFileUri String
- The HCFS URI of the main Python file to use as the driver. Must be a .py file.
- archiveUris List<String>
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- args List<String>
- The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
- fileUris List<String>
- HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
- jarFileUris List<String>
- HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
- pythonFileUris List<String>
- HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
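As a minimal TypeScript sketch (file paths and names below are hypothetical), a PySpark application supplies this config through pysparkApplicationConfig:

import * as gcp from "@pulumi/gcp";

const pysparkApp = new gcp.dataproc.GdcSparkApplication("pyspark-app", {
    sparkApplicationId: "example-pyspark-app",
    serviceinstance: "my-gdc-service-instance",
    project: "my-project",
    location: "us-west2",
    namespace: "default",
    pysparkApplicationConfig: {
        mainPythonFileUri: "file:///opt/jobs/job.py",     // must be a .py file
        pythonFileUris: ["file:///opt/jobs/helpers.zip"], // extra modules for the framework
        args: ["--input", "/data/in"],                    // driver args; avoid flags like --conf
    },
});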
GdcSparkApplicationSparkApplicationConfig, GdcSparkApplicationSparkApplicationConfigArgs
- ArchiveUris List<string>
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- Args List<string>
- The arguments to pass to the driver. Do not include arguments that can be set as application properties, such as --conf, since a collision can occur that causes an incorrect application submission.
- FileUris List<string>
- HCFS URIs of files to be placed in the working directory of each executor.
- JarFileUris List<string>
- HCFS URIs of jar files to add to the classpath of the Spark driver and tasks.
- MainClass string
- The name of the driver main class. The jar file that contains the class must be in the classpath or specified in jar_file_uris.
- MainJarFileUri string
- The HCFS URI of the jar file that contains the main class.
- ArchiveUris []string
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- Args []string
- The arguments to pass to the driver. Do not include arguments that can be set as application properties, such as --conf, since a collision can occur that causes an incorrect application submission.
- FileUris []string
- HCFS URIs of files to be placed in the working directory of each executor.
- JarFileUris []string
- HCFS URIs of jar files to add to the classpath of the Spark driver and tasks.
- MainClass string
- The name of the driver main class. The jar file that contains the class must be in the classpath or specified in jar_file_uris.
- MainJarFileUri string
- The HCFS URI of the jar file that contains the main class.
- archiveUris List<String>
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- args List<String>
- The arguments to pass to the driver. Do not include arguments that can be set as application properties, such as --conf, since a collision can occur that causes an incorrect application submission.
- fileUris List<String>
- HCFS URIs of files to be placed in the working directory of each executor.
- jarFileUris List<String>
- HCFS URIs of jar files to add to the classpath of the Spark driver and tasks.
- mainClass String
- The name of the driver main class. The jar file that contains the class must be in the classpath or specified in jar_file_uris.
- mainJarFileUri String
- The HCFS URI of the jar file that contains the main class.
- archiveUris string[]
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- args string[]
- The arguments to pass to the driver. Do not include arguments that can be set as application properties, such as --conf, since a collision can occur that causes an incorrect application submission.
- fileUris string[]
- HCFS URIs of files to be placed in the working directory of each executor.
- jarFileUris string[]
- HCFS URIs of jar files to add to the classpath of the Spark driver and tasks.
- mainClass string
- The name of the driver main class. The jar file that contains the class must be in the classpath or specified in jar_file_uris.
- mainJarFileUri string
- The HCFS URI of the jar file that contains the main class.
- archive_uris Sequence[str]
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- args Sequence[str]
- The arguments to pass to the driver. Do not include arguments that can be set as application properties, such as --conf, since a collision can occur that causes an incorrect application submission.
- file_uris Sequence[str]
- HCFS URIs of files to be placed in the working directory of each executor.
- jar_file_uris Sequence[str]
- HCFS URIs of jar files to add to the classpath of the Spark driver and tasks.
- main_class str
- The name of the driver main class. The jar file that contains the class must be in the classpath or specified in jar_file_uris.
- main_jar_file_uri str
- The HCFS URI of the jar file that contains the main class.
- archiveUris List<String>
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- args List<String>
- The arguments to pass to the driver. Do not include arguments that can be set as application properties, such as --conf, since a collision can occur that causes an incorrect application submission.
- fileUris List<String>
- HCFS URIs of files to be placed in the working directory of each executor.
- jarFileUris List<String>
- HCFS URIs of jar files to add to the classpath of the Spark driver and tasks.
- mainClass String
- The name of the driver main class. The jar file that contains the class must be in the classpath or specified in jar_file_uris.
- mainJarFileUri String
- The HCFS URI of the jar file that contains the main class.
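As a sketch of the two ways this config can identify the driver (the jar and class names are hypothetical, and the type path assumes the Node SDK's usual types.input layout):

import * as gcp from "@pulumi/gcp";

// Option A: name the main class and make its jar available via jarFileUris.
const byClass: gcp.types.input.dataproc.GdcSparkApplicationSparkApplicationConfig = {
    mainClass: "com.example.MyJob",
    jarFileUris: ["file:///opt/jobs/my-job.jar"],
};

// Option B: point directly at the jar that contains the main class.
const byJar: gcp.types.input.dataproc.GdcSparkApplicationSparkApplicationConfig = {
    mainJarFileUri: "file:///opt/jobs/my-job.jar",
};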
GdcSparkApplicationSparkRApplicationConfig, GdcSparkApplicationSparkRApplicationConfigArgs
- MainRFileUri string
- The HCFS URI of the main R file to use as the driver. Must be a .R file.
- ArchiveUris List<string>
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- Args List<string>
- The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
- FileUris List<string>
- HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
- MainRFileUri string
- The HCFS URI of the main R file to use as the driver. Must be a .R file.
- ArchiveUris []string
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- Args []string
- The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
- FileUris []string
- HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
- mainRFileUri String
- The HCFS URI of the main R file to use as the driver. Must be a .R file.
- archiveUris List<String>
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- args List<String>
- The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
- fileUris List<String>
- HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
- mainRFileUri string
- The HCFS URI of the main R file to use as the driver. Must be a .R file.
- archiveUris string[]
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- args string[]
- The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
- fileUris string[]
- HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
- main_r_file_uri str
- The HCFS URI of the main R file to use as the driver. Must be a .R file.
- archive_uris Sequence[str]
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- args Sequence[str]
- The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
- file_uris Sequence[str]
- HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
- mainRFileUri String
- The HCFS URI of the main R file to use as the driver. Must be a .R file.
- archiveUris List<String>
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- args List<String>
- The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
- fileUris List<String>
- HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
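A minimal TypeScript sketch (paths are hypothetical) of a SparkR application using this config:

import * as gcp from "@pulumi/gcp";

const sparkRApp = new gcp.dataproc.GdcSparkApplication("sparkr-app", {
    sparkApplicationId: "example-sparkr-app",
    serviceinstance: "my-gdc-service-instance",
    project: "my-project",
    location: "us-west2",
    namespace: "default",
    sparkRApplicationConfig: {
        mainRFileUri: "file:///opt/jobs/job.R",    // must be a .R file
        fileUris: ["file:///opt/jobs/lookup.csv"], // placed in each executor's working directory
    },
});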
GdcSparkApplicationSparkSqlApplicationConfig, GdcSparkApplicationSparkSqlApplicationConfigArgs
- JarFileUris List<string>
- HCFS URIs of jar files to be added to the Spark CLASSPATH.
- QueryFileUri string
- The HCFS URI of the script that contains SQL queries.
- QueryList GdcSparkApplicationSparkSqlApplicationConfigQueryList
- Represents a list of queries. Structure is documented below.
- ScriptVariables Dictionary<string, string>
- Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
- JarFileUris []string
- HCFS URIs of jar files to be added to the Spark CLASSPATH.
- QueryFileUri string
- The HCFS URI of the script that contains SQL queries.
- QueryList GdcSparkApplicationSparkSqlApplicationConfigQueryList
- Represents a list of queries. Structure is documented below.
- ScriptVariables map[string]string
- Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
- jarFileUris List<String>
- HCFS URIs of jar files to be added to the Spark CLASSPATH.
- queryFileUri String
- The HCFS URI of the script that contains SQL queries.
- queryList GdcSparkApplicationSparkSqlApplicationConfigQueryList
- Represents a list of queries. Structure is documented below.
- scriptVariables Map<String,String>
- Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
- jarFileUris string[]
- HCFS URIs of jar files to be added to the Spark CLASSPATH.
- queryFileUri string
- The HCFS URI of the script that contains SQL queries.
- queryList GdcSparkApplicationSparkSqlApplicationConfigQueryList
- Represents a list of queries. Structure is documented below.
- scriptVariables {[key: string]: string}
- Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
- jar_file_uris Sequence[str]
- HCFS URIs of jar files to be added to the Spark CLASSPATH.
- query_file_uri str
- The HCFS URI of the script that contains SQL queries.
- query_list GdcSparkApplicationSparkSqlApplicationConfigQueryList
- Represents a list of queries. Structure is documented below.
- script_variables Mapping[str, str]
- Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
- jarFileUris List<String>
- HCFS URIs of jar files to be added to the Spark CLASSPATH.
- queryFileUri String
- The HCFS URI of the script that contains SQL queries.
- queryList Property Map
- Represents a list of queries. Structure is documented below.
- scriptVariables Map<String>
- Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
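A minimal TypeScript sketch (the script path and variable are hypothetical) that runs a SQL script with a substituted variable:

import * as gcp from "@pulumi/gcp";

const sqlApp = new gcp.dataproc.GdcSparkApplication("sparksql-app", {
    sparkApplicationId: "example-sparksql-app",
    serviceinstance: "my-gdc-service-instance",
    project: "my-project",
    location: "us-west2",
    namespace: "default",
    sparkSqlApplicationConfig: {
        queryFileUri: "file:///opt/jobs/report.sql",
        // Substituted as if by: SET run_date="2024-01-01";
        scriptVariables: { run_date: "2024-01-01" },
    },
});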
GdcSparkApplicationSparkSqlApplicationConfigQueryList, GdcSparkApplicationSparkSqlApplicationConfigQueryListArgs
- Queries List<string>
- The queries to run.
- Queries []string
- The queries to run.
- queries List<String>
- The queries to run.
- queries string[]
- The queries to run.
- queries Sequence[str]
- The queries to run.
- queries List<String>
- The queries to run.
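Alternatively, queries can be passed inline via queryList instead of queryFileUri; a sketch with hypothetical SQL (the type path assumes the Node SDK's usual types.input layout):

import * as gcp from "@pulumi/gcp";

// Inline queries instead of a script file; pass this object as sparkSqlApplicationConfig.
const inlineSql: gcp.types.input.dataproc.GdcSparkApplicationSparkSqlApplicationConfig = {
    queryList: {
        queries: [
            "SHOW DATABASES;",
            "SELECT COUNT(*) FROM my_db.events;",
        ],
    },
};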
Import
SparkApplication can be imported using any of these accepted formats:
projects/{{project}}/locations/{{location}}/serviceInstances/{{serviceinstance}}/sparkApplications/{{spark_application_id}}
{{project}}/{{location}}/{{serviceinstance}}/{{spark_application_id}}
{{location}}/{{serviceinstance}}/{{spark_application_id}}
When using the pulumi import command, SparkApplication can be imported using one of the formats above. For example:
$ pulumi import gcp:dataproc/gdcSparkApplication:GdcSparkApplication default projects/{{project}}/locations/{{location}}/serviceInstances/{{serviceinstance}}/sparkApplications/{{spark_application_id}}
$ pulumi import gcp:dataproc/gdcSparkApplication:GdcSparkApplication default {{project}}/{{location}}/{{serviceinstance}}/{{spark_application_id}}
$ pulumi import gcp:dataproc/gdcSparkApplication:GdcSparkApplication default {{location}}/{{serviceinstance}}/{{spark_application_id}}
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- Google Cloud (GCP) Classic pulumi/pulumi-gcp
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the google-beta Terraform Provider.