gcp.dataproc.GdcSparkApplication
A Spark application is a single Spark workload run on a GDC cluster.
To get more information about SparkApplication, see:
- API documentation
- How-to Guides
Example Usage
Dataprocgdc Sparkapplication Basic
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const spark_application = new gcp.dataproc.GdcSparkApplication("spark-application", {
    sparkApplicationId: "tf-e2e-spark-app-basic",
    serviceinstance: "do-not-delete-dataproc-gdc-instance",
    project: "my-project",
    location: "us-west2",
    namespace: "default",
    sparkApplicationConfig: {
        mainClass: "org.apache.spark.examples.SparkPi",
        jarFileUris: ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
        args: ["10000"],
    },
});
import pulumi
import pulumi_gcp as gcp
spark_application = gcp.dataproc.GdcSparkApplication("spark-application",
    spark_application_id="tf-e2e-spark-app-basic",
    serviceinstance="do-not-delete-dataproc-gdc-instance",
    project="my-project",
    location="us-west2",
    namespace="default",
    spark_application_config={
        "main_class": "org.apache.spark.examples.SparkPi",
        "jar_file_uris": ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
        "args": ["10000"],
    })
package main
import (
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/dataproc"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := dataproc.NewGdcSparkApplication(ctx, "spark-application", &dataproc.GdcSparkApplicationArgs{
			SparkApplicationId: pulumi.String("tf-e2e-spark-app-basic"),
			Serviceinstance:    pulumi.String("do-not-delete-dataproc-gdc-instance"),
			Project:            pulumi.String("my-project"),
			Location:           pulumi.String("us-west2"),
			Namespace:          pulumi.String("default"),
			SparkApplicationConfig: &dataproc.GdcSparkApplicationSparkApplicationConfigArgs{
				MainClass: pulumi.String("org.apache.spark.examples.SparkPi"),
				JarFileUris: pulumi.StringArray{
					pulumi.String("file:///usr/lib/spark/examples/jars/spark-examples.jar"),
				},
				Args: pulumi.StringArray{
					pulumi.String("10000"),
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() => 
{
    var spark_application = new Gcp.Dataproc.GdcSparkApplication("spark-application", new()
    {
        SparkApplicationId = "tf-e2e-spark-app-basic",
        Serviceinstance = "do-not-delete-dataproc-gdc-instance",
        Project = "my-project",
        Location = "us-west2",
        Namespace = "default",
        SparkApplicationConfig = new Gcp.Dataproc.Inputs.GdcSparkApplicationSparkApplicationConfigArgs
        {
            MainClass = "org.apache.spark.examples.SparkPi",
            JarFileUris = new[]
            {
                "file:///usr/lib/spark/examples/jars/spark-examples.jar",
            },
            Args = new[]
            {
                "10000",
            },
        },
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.dataproc.GdcSparkApplication;
import com.pulumi.gcp.dataproc.GdcSparkApplicationArgs;
import com.pulumi.gcp.dataproc.inputs.GdcSparkApplicationSparkApplicationConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var spark_application = new GdcSparkApplication("spark-application", GdcSparkApplicationArgs.builder()
            .sparkApplicationId("tf-e2e-spark-app-basic")
            .serviceinstance("do-not-delete-dataproc-gdc-instance")
            .project("my-project")
            .location("us-west2")
            .namespace("default")
            .sparkApplicationConfig(GdcSparkApplicationSparkApplicationConfigArgs.builder()
                .mainClass("org.apache.spark.examples.SparkPi")
                .jarFileUris("file:///usr/lib/spark/examples/jars/spark-examples.jar")
                .args("10000")
                .build())
            .build());
    }
}
resources:
  spark-application:
    type: gcp:dataproc:GdcSparkApplication
    properties:
      sparkApplicationId: tf-e2e-spark-app-basic
      serviceinstance: do-not-delete-dataproc-gdc-instance
      project: my-project
      location: us-west2
      namespace: default
      sparkApplicationConfig:
        mainClass: org.apache.spark.examples.SparkPi
        jarFileUris:
          - file:///usr/lib/spark/examples/jars/spark-examples.jar
        args:
          - '10000'
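The resource's provider-assigned identifiers can also be surfaced as stack outputs once the application is defined. A minimal Python sketch, assuming the spark_application resource from the basic Python example above is in scope (id is the identifier Pulumi attaches to every resource):
import pulumi

# Assumes `spark_application` from the basic Python example above is in scope.
# Every Pulumi resource exposes an `id` output; exporting it makes the created
# application's identifier visible via `pulumi stack output`.
pulumi.export("spark_application_id", spark_application.id)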
Dataprocgdc Sparkapplication
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const appEnv = new gcp.dataproc.GdcApplicationEnvironment("app_env", {
    applicationEnvironmentId: "tf-e2e-spark-app-env",
    serviceinstance: "do-not-delete-dataproc-gdc-instance",
    project: "my-project",
    location: "us-west2",
    namespace: "default",
});
const spark_application = new gcp.dataproc.GdcSparkApplication("spark-application", {
    sparkApplicationId: "tf-e2e-spark-app",
    serviceinstance: "do-not-delete-dataproc-gdc-instance",
    project: "my-project",
    location: "us-west2",
    namespace: "default",
    labels: {
        "test-label": "label-value",
    },
    annotations: {
        an_annotation: "annotation_value",
    },
    properties: {
        "spark.executor.instances": "2",
    },
    applicationEnvironment: appEnv.name,
    version: "1.2",
    sparkApplicationConfig: {
        mainJarFileUri: "file:///usr/lib/spark/examples/jars/spark-examples.jar",
        jarFileUris: ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
        archiveUris: ["file://usr/lib/spark/examples/spark-examples.jar"],
        fileUris: ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
    },
});
import pulumi
import pulumi_gcp as gcp
app_env = gcp.dataproc.GdcApplicationEnvironment("app_env",
    application_environment_id="tf-e2e-spark-app-env",
    serviceinstance="do-not-delete-dataproc-gdc-instance",
    project="my-project",
    location="us-west2",
    namespace="default")
spark_application = gcp.dataproc.GdcSparkApplication("spark-application",
    spark_application_id="tf-e2e-spark-app",
    serviceinstance="do-not-delete-dataproc-gdc-instance",
    project="my-project",
    location="us-west2",
    namespace="default",
    labels={
        "test-label": "label-value",
    },
    annotations={
        "an_annotation": "annotation_value",
    },
    properties={
        "spark.executor.instances": "2",
    },
    application_environment=app_env.name,
    version="1.2",
    spark_application_config={
        "main_jar_file_uri": "file:///usr/lib/spark/examples/jars/spark-examples.jar",
        "jar_file_uris": ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
        "archive_uris": ["file://usr/lib/spark/examples/spark-examples.jar"],
        "file_uris": ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
    })
package main
import (
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/dataproc"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		appEnv, err := dataproc.NewGdcApplicationEnvironment(ctx, "app_env", &dataproc.GdcApplicationEnvironmentArgs{
			ApplicationEnvironmentId: pulumi.String("tf-e2e-spark-app-env"),
			Serviceinstance:          pulumi.String("do-not-delete-dataproc-gdc-instance"),
			Project:                  pulumi.String("my-project"),
			Location:                 pulumi.String("us-west2"),
			Namespace:                pulumi.String("default"),
		})
		if err != nil {
			return err
		}
		_, err = dataproc.NewGdcSparkApplication(ctx, "spark-application", &dataproc.GdcSparkApplicationArgs{
			SparkApplicationId: pulumi.String("tf-e2e-spark-app"),
			Serviceinstance:    pulumi.String("do-not-delete-dataproc-gdc-instance"),
			Project:            pulumi.String("my-project"),
			Location:           pulumi.String("us-west2"),
			Namespace:          pulumi.String("default"),
			Labels: pulumi.StringMap{
				"test-label": pulumi.String("label-value"),
			},
			Annotations: pulumi.StringMap{
				"an_annotation": pulumi.String("annotation_value"),
			},
			Properties: pulumi.StringMap{
				"spark.executor.instances": pulumi.String("2"),
			},
			ApplicationEnvironment: appEnv.Name,
			Version:                pulumi.String("1.2"),
			SparkApplicationConfig: &dataproc.GdcSparkApplicationSparkApplicationConfigArgs{
				MainJarFileUri: pulumi.String("file:///usr/lib/spark/examples/jars/spark-examples.jar"),
				JarFileUris: pulumi.StringArray{
					pulumi.String("file:///usr/lib/spark/examples/jars/spark-examples.jar"),
				},
				ArchiveUris: pulumi.StringArray{
					pulumi.String("file://usr/lib/spark/examples/spark-examples.jar"),
				},
				FileUris: pulumi.StringArray{
					pulumi.String("file:///usr/lib/spark/examples/jars/spark-examples.jar"),
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() => 
{
    var appEnv = new Gcp.Dataproc.GdcApplicationEnvironment("app_env", new()
    {
        ApplicationEnvironmentId = "tf-e2e-spark-app-env",
        Serviceinstance = "do-not-delete-dataproc-gdc-instance",
        Project = "my-project",
        Location = "us-west2",
        Namespace = "default",
    });
    var spark_application = new Gcp.Dataproc.GdcSparkApplication("spark-application", new()
    {
        SparkApplicationId = "tf-e2e-spark-app",
        Serviceinstance = "do-not-delete-dataproc-gdc-instance",
        Project = "my-project",
        Location = "us-west2",
        Namespace = "default",
        Labels = 
        {
            { "test-label", "label-value" },
        },
        Annotations = 
        {
            { "an_annotation", "annotation_value" },
        },
        Properties = 
        {
            { "spark.executor.instances", "2" },
        },
        ApplicationEnvironment = appEnv.Name,
        Version = "1.2",
        SparkApplicationConfig = new Gcp.Dataproc.Inputs.GdcSparkApplicationSparkApplicationConfigArgs
        {
            MainJarFileUri = "file:///usr/lib/spark/examples/jars/spark-examples.jar",
            JarFileUris = new[]
            {
                "file:///usr/lib/spark/examples/jars/spark-examples.jar",
            },
            ArchiveUris = new[]
            {
                "file://usr/lib/spark/examples/spark-examples.jar",
            },
            FileUris = new[]
            {
                "file:///usr/lib/spark/examples/jars/spark-examples.jar",
            },
        },
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.dataproc.GdcApplicationEnvironment;
import com.pulumi.gcp.dataproc.GdcApplicationEnvironmentArgs;
import com.pulumi.gcp.dataproc.GdcSparkApplication;
import com.pulumi.gcp.dataproc.GdcSparkApplicationArgs;
import com.pulumi.gcp.dataproc.inputs.GdcSparkApplicationSparkApplicationConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var appEnv = new GdcApplicationEnvironment("appEnv", GdcApplicationEnvironmentArgs.builder()
            .applicationEnvironmentId("tf-e2e-spark-app-env")
            .serviceinstance("do-not-delete-dataproc-gdc-instance")
            .project("my-project")
            .location("us-west2")
            .namespace("default")
            .build());
        var spark_application = new GdcSparkApplication("spark-application", GdcSparkApplicationArgs.builder()
            .sparkApplicationId("tf-e2e-spark-app")
            .serviceinstance("do-not-delete-dataproc-gdc-instance")
            .project("my-project")
            .location("us-west2")
            .namespace("default")
            .labels(Map.of("test-label", "label-value"))
            .annotations(Map.of("an_annotation", "annotation_value"))
            .properties(Map.of("spark.executor.instances", "2"))
            .applicationEnvironment(appEnv.name())
            .version("1.2")
            .sparkApplicationConfig(GdcSparkApplicationSparkApplicationConfigArgs.builder()
                .mainJarFileUri("file:///usr/lib/spark/examples/jars/spark-examples.jar")
                .jarFileUris("file:///usr/lib/spark/examples/jars/spark-examples.jar")
                .archiveUris("file://usr/lib/spark/examples/spark-examples.jar")
                .fileUris("file:///usr/lib/spark/examples/jars/spark-examples.jar")
                .build())
            .build());
    }
}
resources:
  appEnv:
    type: gcp:dataproc:GdcApplicationEnvironment
    name: app_env
    properties:
      applicationEnvironmentId: tf-e2e-spark-app-env
      serviceinstance: do-not-delete-dataproc-gdc-instance
      project: my-project
      location: us-west2
      namespace: default
  spark-application:
    type: gcp:dataproc:GdcSparkApplication
    properties:
      sparkApplicationId: tf-e2e-spark-app
      serviceinstance: do-not-delete-dataproc-gdc-instance
      project: my-project
      location: us-west2
      namespace: default
      labels:
        test-label: label-value
      annotations:
        an_annotation: annotation_value
      properties:
        spark.executor.instances: '2'
      applicationEnvironment: ${appEnv.name}
      version: '1.2'
      sparkApplicationConfig:
        mainJarFileUri: file:///usr/lib/spark/examples/jars/spark-examples.jar
        jarFileUris:
          - file:///usr/lib/spark/examples/jars/spark-examples.jar
        archiveUris:
          - file://usr/lib/spark/examples/spark-examples.jar
        fileUris:
          - file:///usr/lib/spark/examples/jars/spark-examples.jar
Dataprocgdc Sparkapplication Pyspark
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const spark_application = new gcp.dataproc.GdcSparkApplication("spark-application", {
    sparkApplicationId: "tf-e2e-pyspark-app",
    serviceinstance: "do-not-delete-dataproc-gdc-instance",
    project: "my-project",
    location: "us-west2",
    namespace: "default",
    displayName: "A Pyspark application for a Terraform create test",
    dependencyImages: ["gcr.io/some/image"],
    pysparkApplicationConfig: {
        mainPythonFileUri: "gs://goog-dataproc-initialization-actions-us-west2/conda/test_conda.py",
        jarFileUris: ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
        pythonFileUris: ["gs://goog-dataproc-initialization-actions-us-west2/conda/get-sys-exec.py"],
        fileUris: ["file://usr/lib/spark/examples/spark-examples.jar"],
        archiveUris: ["file://usr/lib/spark/examples/spark-examples.jar"],
        args: ["10"],
    },
});
import pulumi
import pulumi_gcp as gcp
spark_application = gcp.dataproc.GdcSparkApplication("spark-application",
    spark_application_id="tf-e2e-pyspark-app",
    serviceinstance="do-not-delete-dataproc-gdc-instance",
    project="my-project",
    location="us-west2",
    namespace="default",
    display_name="A Pyspark application for a Terraform create test",
    dependency_images=["gcr.io/some/image"],
    pyspark_application_config={
        "main_python_file_uri": "gs://goog-dataproc-initialization-actions-us-west2/conda/test_conda.py",
        "jar_file_uris": ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
        "python_file_uris": ["gs://goog-dataproc-initialization-actions-us-west2/conda/get-sys-exec.py"],
        "file_uris": ["file://usr/lib/spark/examples/spark-examples.jar"],
        "archive_uris": ["file://usr/lib/spark/examples/spark-examples.jar"],
        "args": ["10"],
    })
package main
import (
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/dataproc"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := dataproc.NewGdcSparkApplication(ctx, "spark-application", &dataproc.GdcSparkApplicationArgs{
			SparkApplicationId: pulumi.String("tf-e2e-pyspark-app"),
			Serviceinstance:    pulumi.String("do-not-delete-dataproc-gdc-instance"),
			Project:            pulumi.String("my-project"),
			Location:           pulumi.String("us-west2"),
			Namespace:          pulumi.String("default"),
			DisplayName:        pulumi.String("A Pyspark application for a Terraform create test"),
			DependencyImages: pulumi.StringArray{
				pulumi.String("gcr.io/some/image"),
			},
			PysparkApplicationConfig: &dataproc.GdcSparkApplicationPysparkApplicationConfigArgs{
				MainPythonFileUri: pulumi.String("gs://goog-dataproc-initialization-actions-us-west2/conda/test_conda.py"),
				JarFileUris: pulumi.StringArray{
					pulumi.String("file:///usr/lib/spark/examples/jars/spark-examples.jar"),
				},
				PythonFileUris: pulumi.StringArray{
					pulumi.String("gs://goog-dataproc-initialization-actions-us-west2/conda/get-sys-exec.py"),
				},
				FileUris: pulumi.StringArray{
					pulumi.String("file://usr/lib/spark/examples/spark-examples.jar"),
				},
				ArchiveUris: pulumi.StringArray{
					pulumi.String("file://usr/lib/spark/examples/spark-examples.jar"),
				},
				Args: pulumi.StringArray{
					pulumi.String("10"),
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() => 
{
    var spark_application = new Gcp.Dataproc.GdcSparkApplication("spark-application", new()
    {
        SparkApplicationId = "tf-e2e-pyspark-app",
        Serviceinstance = "do-not-delete-dataproc-gdc-instance",
        Project = "my-project",
        Location = "us-west2",
        Namespace = "default",
        DisplayName = "A Pyspark application for a Terraform create test",
        DependencyImages = new[]
        {
            "gcr.io/some/image",
        },
        PysparkApplicationConfig = new Gcp.Dataproc.Inputs.GdcSparkApplicationPysparkApplicationConfigArgs
        {
            MainPythonFileUri = "gs://goog-dataproc-initialization-actions-us-west2/conda/test_conda.py",
            JarFileUris = new[]
            {
                "file:///usr/lib/spark/examples/jars/spark-examples.jar",
            },
            PythonFileUris = new[]
            {
                "gs://goog-dataproc-initialization-actions-us-west2/conda/get-sys-exec.py",
            },
            FileUris = new[]
            {
                "file://usr/lib/spark/examples/spark-examples.jar",
            },
            ArchiveUris = new[]
            {
                "file://usr/lib/spark/examples/spark-examples.jar",
            },
            Args = new[]
            {
                "10",
            },
        },
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.dataproc.GdcSparkApplication;
import com.pulumi.gcp.dataproc.GdcSparkApplicationArgs;
import com.pulumi.gcp.dataproc.inputs.GdcSparkApplicationPysparkApplicationConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var spark_application = new GdcSparkApplication("spark-application", GdcSparkApplicationArgs.builder()
            .sparkApplicationId("tf-e2e-pyspark-app")
            .serviceinstance("do-not-delete-dataproc-gdc-instance")
            .project("my-project")
            .location("us-west2")
            .namespace("default")
            .displayName("A Pyspark application for a Terraform create test")
            .dependencyImages("gcr.io/some/image")
            .pysparkApplicationConfig(GdcSparkApplicationPysparkApplicationConfigArgs.builder()
                .mainPythonFileUri("gs://goog-dataproc-initialization-actions-us-west2/conda/test_conda.py")
                .jarFileUris("file:///usr/lib/spark/examples/jars/spark-examples.jar")
                .pythonFileUris("gs://goog-dataproc-initialization-actions-us-west2/conda/get-sys-exec.py")
                .fileUris("file://usr/lib/spark/examples/spark-examples.jar")
                .archiveUris("file://usr/lib/spark/examples/spark-examples.jar")
                .args("10")
                .build())
            .build());
    }
}
resources:
  spark-application:
    type: gcp:dataproc:GdcSparkApplication
    properties:
      sparkApplicationId: tf-e2e-pyspark-app
      serviceinstance: do-not-delete-dataproc-gdc-instance
      project: my-project
      location: us-west2
      namespace: default
      displayName: A Pyspark application for a Terraform create test
      dependencyImages:
        - gcr.io/some/image
      pysparkApplicationConfig:
        mainPythonFileUri: gs://goog-dataproc-initialization-actions-us-west2/conda/test_conda.py
        jarFileUris:
          - file:///usr/lib/spark/examples/jars/spark-examples.jar
        pythonFileUris:
          - gs://goog-dataproc-initialization-actions-us-west2/conda/get-sys-exec.py
        fileUris:
          - file://usr/lib/spark/examples/spark-examples.jar
        archiveUris:
          - file://usr/lib/spark/examples/spark-examples.jar
        args:
          - '10'
Dataprocgdc Sparkapplication Sparkr
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const spark_application = new gcp.dataproc.GdcSparkApplication("spark-application", {
    sparkApplicationId: "tf-e2e-sparkr-app",
    serviceinstance: "do-not-delete-dataproc-gdc-instance",
    project: "my-project",
    location: "us-west2",
    namespace: "default",
    displayName: "A SparkR application for a Terraform create test",
    sparkRApplicationConfig: {
        mainRFileUri: "gs://some-bucket/something.R",
        fileUris: ["file://usr/lib/spark/examples/spark-examples.jar"],
        archiveUris: ["file://usr/lib/spark/examples/spark-examples.jar"],
        args: ["10"],
    },
});
import pulumi
import pulumi_gcp as gcp
spark_application = gcp.dataproc.GdcSparkApplication("spark-application",
    spark_application_id="tf-e2e-sparkr-app",
    serviceinstance="do-not-delete-dataproc-gdc-instance",
    project="my-project",
    location="us-west2",
    namespace="default",
    display_name="A SparkR application for a Terraform create test",
    spark_r_application_config={
        "main_r_file_uri": "gs://some-bucket/something.R",
        "file_uris": ["file://usr/lib/spark/examples/spark-examples.jar"],
        "archive_uris": ["file://usr/lib/spark/examples/spark-examples.jar"],
        "args": ["10"],
    })
package main
import (
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/dataproc"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := dataproc.NewGdcSparkApplication(ctx, "spark-application", &dataproc.GdcSparkApplicationArgs{
			SparkApplicationId: pulumi.String("tf-e2e-sparkr-app"),
			Serviceinstance:    pulumi.String("do-not-delete-dataproc-gdc-instance"),
			Project:            pulumi.String("my-project"),
			Location:           pulumi.String("us-west2"),
			Namespace:          pulumi.String("default"),
			DisplayName:        pulumi.String("A SparkR application for a Terraform create test"),
			SparkRApplicationConfig: &dataproc.GdcSparkApplicationSparkRApplicationConfigArgs{
				MainRFileUri: pulumi.String("gs://some-bucket/something.R"),
				FileUris: pulumi.StringArray{
					pulumi.String("file://usr/lib/spark/examples/spark-examples.jar"),
				},
				ArchiveUris: pulumi.StringArray{
					pulumi.String("file://usr/lib/spark/examples/spark-examples.jar"),
				},
				Args: pulumi.StringArray{
					pulumi.String("10"),
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() => 
{
    var spark_application = new Gcp.Dataproc.GdcSparkApplication("spark-application", new()
    {
        SparkApplicationId = "tf-e2e-sparkr-app",
        Serviceinstance = "do-not-delete-dataproc-gdc-instance",
        Project = "my-project",
        Location = "us-west2",
        Namespace = "default",
        DisplayName = "A SparkR application for a Terraform create test",
        SparkRApplicationConfig = new Gcp.Dataproc.Inputs.GdcSparkApplicationSparkRApplicationConfigArgs
        {
            MainRFileUri = "gs://some-bucket/something.R",
            FileUris = new[]
            {
                "file://usr/lib/spark/examples/spark-examples.jar",
            },
            ArchiveUris = new[]
            {
                "file://usr/lib/spark/examples/spark-examples.jar",
            },
            Args = new[]
            {
                "10",
            },
        },
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.dataproc.GdcSparkApplication;
import com.pulumi.gcp.dataproc.GdcSparkApplicationArgs;
import com.pulumi.gcp.dataproc.inputs.GdcSparkApplicationSparkRApplicationConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var spark_application = new GdcSparkApplication("spark-application", GdcSparkApplicationArgs.builder()
            .sparkApplicationId("tf-e2e-sparkr-app")
            .serviceinstance("do-not-delete-dataproc-gdc-instance")
            .project("my-project")
            .location("us-west2")
            .namespace("default")
            .displayName("A SparkR application for a Terraform create test")
            .sparkRApplicationConfig(GdcSparkApplicationSparkRApplicationConfigArgs.builder()
                .mainRFileUri("gs://some-bucket/something.R")
                .fileUris("file://usr/lib/spark/examples/spark-examples.jar")
                .archiveUris("file://usr/lib/spark/examples/spark-examples.jar")
                .args("10")
                .build())
            .build());
    }
}
resources:
  spark-application:
    type: gcp:dataproc:GdcSparkApplication
    properties:
      sparkApplicationId: tf-e2e-sparkr-app
      serviceinstance: do-not-delete-dataproc-gdc-instance
      project: my-project
      location: us-west2
      namespace: default
      displayName: A SparkR application for a Terraform create test
      sparkRApplicationConfig:
        mainRFileUri: gs://some-bucket/something.R
        fileUris:
          - file://usr/lib/spark/examples/spark-examples.jar
        archiveUris:
          - file://usr/lib/spark/examples/spark-examples.jar
        args:
          - '10'
Dataprocgdc Sparkapplication Sparksql
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const spark_application = new gcp.dataproc.GdcSparkApplication("spark-application", {
    sparkApplicationId: "tf-e2e-sparksql-app",
    serviceinstance: "do-not-delete-dataproc-gdc-instance",
    project: "my-project",
    location: "us-west2",
    namespace: "default",
    displayName: "A SparkSql application for a Terraform create test",
    sparkSqlApplicationConfig: {
        jarFileUris: ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
        queryList: {
            queries: ["show tables;"],
        },
        scriptVariables: {
            MY_VAR: "1",
        },
    },
});
import pulumi
import pulumi_gcp as gcp
spark_application = gcp.dataproc.GdcSparkApplication("spark-application",
    spark_application_id="tf-e2e-sparksql-app",
    serviceinstance="do-not-delete-dataproc-gdc-instance",
    project="my-project",
    location="us-west2",
    namespace="default",
    display_name="A SparkSql application for a Terraform create test",
    spark_sql_application_config={
        "jar_file_uris": ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
        "query_list": {
            "queries": ["show tables;"],
        },
        "script_variables": {
            "MY_VAR": "1",
        },
    })
package main
import (
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/dataproc"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := dataproc.NewGdcSparkApplication(ctx, "spark-application", &dataproc.GdcSparkApplicationArgs{
			SparkApplicationId: pulumi.String("tf-e2e-sparksql-app"),
			Serviceinstance:    pulumi.String("do-not-delete-dataproc-gdc-instance"),
			Project:            pulumi.String("my-project"),
			Location:           pulumi.String("us-west2"),
			Namespace:          pulumi.String("default"),
			DisplayName:        pulumi.String("A SparkSql application for a Terraform create test"),
			SparkSqlApplicationConfig: &dataproc.GdcSparkApplicationSparkSqlApplicationConfigArgs{
				JarFileUris: pulumi.StringArray{
					pulumi.String("file:///usr/lib/spark/examples/jars/spark-examples.jar"),
				},
				QueryList: &dataproc.GdcSparkApplicationSparkSqlApplicationConfigQueryListArgs{
					Queries: pulumi.StringArray{
						pulumi.String("show tables;"),
					},
				},
				ScriptVariables: pulumi.StringMap{
					"MY_VAR": pulumi.String("1"),
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() => 
{
    var spark_application = new Gcp.Dataproc.GdcSparkApplication("spark-application", new()
    {
        SparkApplicationId = "tf-e2e-sparksql-app",
        Serviceinstance = "do-not-delete-dataproc-gdc-instance",
        Project = "my-project",
        Location = "us-west2",
        Namespace = "default",
        DisplayName = "A SparkSql application for a Terraform create test",
        SparkSqlApplicationConfig = new Gcp.Dataproc.Inputs.GdcSparkApplicationSparkSqlApplicationConfigArgs
        {
            JarFileUris = new[]
            {
                "file:///usr/lib/spark/examples/jars/spark-examples.jar",
            },
            QueryList = new Gcp.Dataproc.Inputs.GdcSparkApplicationSparkSqlApplicationConfigQueryListArgs
            {
                Queries = new[]
                {
                    "show tables;",
                },
            },
            ScriptVariables = 
            {
                { "MY_VAR", "1" },
            },
        },
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.dataproc.GdcSparkApplication;
import com.pulumi.gcp.dataproc.GdcSparkApplicationArgs;
import com.pulumi.gcp.dataproc.inputs.GdcSparkApplicationSparkSqlApplicationConfigArgs;
import com.pulumi.gcp.dataproc.inputs.GdcSparkApplicationSparkSqlApplicationConfigQueryListArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var spark_application = new GdcSparkApplication("spark-application", GdcSparkApplicationArgs.builder()
            .sparkApplicationId("tf-e2e-sparksql-app")
            .serviceinstance("do-not-delete-dataproc-gdc-instance")
            .project("my-project")
            .location("us-west2")
            .namespace("default")
            .displayName("A SparkSql application for a Terraform create test")
            .sparkSqlApplicationConfig(GdcSparkApplicationSparkSqlApplicationConfigArgs.builder()
                .jarFileUris("file:///usr/lib/spark/examples/jars/spark-examples.jar")
                .queryList(GdcSparkApplicationSparkSqlApplicationConfigQueryListArgs.builder()
                    .queries("show tables;")
                    .build())
                .scriptVariables(Map.of("MY_VAR", "1"))
                .build())
            .build());
    }
}
resources:
  spark-application:
    type: gcp:dataproc:GdcSparkApplication
    properties:
      sparkApplicationId: tf-e2e-sparksql-app
      serviceinstance: do-not-delete-dataproc-gdc-instance
      project: my-project
      location: us-west2
      namespace: default
      displayName: A SparkSql application for a Terraform create test
      sparkSqlApplicationConfig:
        jarFileUris:
          - file:///usr/lib/spark/examples/jars/spark-examples.jar
        queryList:
          queries:
            - show tables;
        scriptVariables:
          MY_VAR: '1'
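The scriptVariables map above pairs naturally with variable substitution in the queries themselves. The sketch below is a hypothetical Python variant in which a ${ROW_LIMIT} placeholder is substituted into the query; the table name some_table and the ROW_LIMIT variable are illustrative, and the SET name=value semantics are an assumption carried over from the analogous Dataproc SparkSqlJob field:
import pulumi_gcp as gcp

# Hypothetical illustration: ROW_LIMIT is supplied through script_variables and
# referenced in the query as ${ROW_LIMIT}, mirroring Spark SQL's `SET` command.
spark_sql_with_vars = gcp.dataproc.GdcSparkApplication("spark-sql-with-vars",
    spark_application_id="sparksql-app-with-vars",
    serviceinstance="do-not-delete-dataproc-gdc-instance",
    project="my-project",
    location="us-west2",
    namespace="default",
    spark_sql_application_config={
        "query_list": {
            "queries": ["SELECT * FROM some_table LIMIT ${ROW_LIMIT};"],
        },
        "script_variables": {
            "ROW_LIMIT": "100",
        },
    })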
Dataprocgdc Sparkapplication Sparksql Query File
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const spark_application = new gcp.dataproc.GdcSparkApplication("spark-application", {
    sparkApplicationId: "tf-e2e-sparksql-app",
    serviceinstance: "do-not-delete-dataproc-gdc-instance",
    project: "my-project",
    location: "us-west2",
    namespace: "default",
    displayName: "A SparkSql application for a Terraform create test",
    sparkSqlApplicationConfig: {
        jarFileUris: ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
        queryFileUri: "gs://some-bucket/something.sql",
        scriptVariables: {
            MY_VAR: "1",
        },
    },
});
import pulumi
import pulumi_gcp as gcp
spark_application = gcp.dataproc.GdcSparkApplication("spark-application",
    spark_application_id="tf-e2e-sparksql-app",
    serviceinstance="do-not-delete-dataproc-gdc-instance",
    project="my-project",
    location="us-west2",
    namespace="default",
    display_name="A SparkSql application for a Terraform create test",
    spark_sql_application_config={
        "jar_file_uris": ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
        "query_file_uri": "gs://some-bucket/something.sql",
        "script_variables": {
            "MY_VAR": "1",
        },
    })
package main
import (
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/dataproc"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := dataproc.NewGdcSparkApplication(ctx, "spark-application", &dataproc.GdcSparkApplicationArgs{
			SparkApplicationId: pulumi.String("tf-e2e-sparksql-app"),
			Serviceinstance:    pulumi.String("do-not-delete-dataproc-gdc-instance"),
			Project:            pulumi.String("my-project"),
			Location:           pulumi.String("us-west2"),
			Namespace:          pulumi.String("default"),
			DisplayName:        pulumi.String("A SparkSql application for a Terraform create test"),
			SparkSqlApplicationConfig: &dataproc.GdcSparkApplicationSparkSqlApplicationConfigArgs{
				JarFileUris: pulumi.StringArray{
					pulumi.String("file:///usr/lib/spark/examples/jars/spark-examples.jar"),
				},
				QueryFileUri: pulumi.String("gs://some-bucket/something.sql"),
				ScriptVariables: pulumi.StringMap{
					"MY_VAR": pulumi.String("1"),
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() => 
{
    var spark_application = new Gcp.Dataproc.GdcSparkApplication("spark-application", new()
    {
        SparkApplicationId = "tf-e2e-sparksql-app",
        Serviceinstance = "do-not-delete-dataproc-gdc-instance",
        Project = "my-project",
        Location = "us-west2",
        Namespace = "default",
        DisplayName = "A SparkSql application for a Terraform create test",
        SparkSqlApplicationConfig = new Gcp.Dataproc.Inputs.GdcSparkApplicationSparkSqlApplicationConfigArgs
        {
            JarFileUris = new[]
            {
                "file:///usr/lib/spark/examples/jars/spark-examples.jar",
            },
            QueryFileUri = "gs://some-bucket/something.sql",
            ScriptVariables = 
            {
                { "MY_VAR", "1" },
            },
        },
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.dataproc.GdcSparkApplication;
import com.pulumi.gcp.dataproc.GdcSparkApplicationArgs;
import com.pulumi.gcp.dataproc.inputs.GdcSparkApplicationSparkSqlApplicationConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var spark_application = new GdcSparkApplication("spark-application", GdcSparkApplicationArgs.builder()
            .sparkApplicationId("tf-e2e-sparksql-app")
            .serviceinstance("do-not-delete-dataproc-gdc-instance")
            .project("my-project")
            .location("us-west2")
            .namespace("default")
            .displayName("A SparkSql application for a Terraform create test")
            .sparkSqlApplicationConfig(GdcSparkApplicationSparkSqlApplicationConfigArgs.builder()
                .jarFileUris("file:///usr/lib/spark/examples/jars/spark-examples.jar")
                .queryFileUri("gs://some-bucket/something.sql")
                .scriptVariables(Map.of("MY_VAR", "1"))
                .build())
            .build());
    }
}
resources:
  spark-application:
    type: gcp:dataproc:GdcSparkApplication
    properties:
      sparkApplicationId: tf-e2e-sparksql-app
      serviceinstance: do-not-delete-dataproc-gdc-instance
      project: my-project
      location: us-west2
      namespace: default
      displayName: A SparkSql application for a Terraform create test
      sparkSqlApplicationConfig:
        jarFileUris:
          - file:///usr/lib/spark/examples/jars/spark-examples.jar
        queryFileUri: gs://some-bucket/something.sql
        scriptVariables:
          MY_VAR: '1'
Create GdcSparkApplication Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new GdcSparkApplication(name: string, args: GdcSparkApplicationArgs, opts?: CustomResourceOptions);
@overload
def GdcSparkApplication(resource_name: str,
                        args: GdcSparkApplicationArgs,
                        opts: Optional[ResourceOptions] = None)
@overload
def GdcSparkApplication(resource_name: str,
                        opts: Optional[ResourceOptions] = None,
                        location: Optional[str] = None,
                        spark_application_id: Optional[str] = None,
                        serviceinstance: Optional[str] = None,
                        labels: Optional[Mapping[str, str]] = None,
                        annotations: Optional[Mapping[str, str]] = None,
                        display_name: Optional[str] = None,
                        namespace: Optional[str] = None,
                        project: Optional[str] = None,
                        properties: Optional[Mapping[str, str]] = None,
                        pyspark_application_config: Optional[GdcSparkApplicationPysparkApplicationConfigArgs] = None,
                        dependency_images: Optional[Sequence[str]] = None,
                        spark_application_config: Optional[GdcSparkApplicationSparkApplicationConfigArgs] = None,
                        application_environment: Optional[str] = None,
                        spark_r_application_config: Optional[GdcSparkApplicationSparkRApplicationConfigArgs] = None,
                        spark_sql_application_config: Optional[GdcSparkApplicationSparkSqlApplicationConfigArgs] = None,
                        version: Optional[str] = None)
func NewGdcSparkApplication(ctx *Context, name string, args GdcSparkApplicationArgs, opts ...ResourceOption) (*GdcSparkApplication, error)
public GdcSparkApplication(string name, GdcSparkApplicationArgs args, CustomResourceOptions? opts = null)
public GdcSparkApplication(String name, GdcSparkApplicationArgs args)
public GdcSparkApplication(String name, GdcSparkApplicationArgs args, CustomResourceOptions options)
type: gcp:dataproc:GdcSparkApplication
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args GdcSparkApplicationArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args GdcSparkApplicationArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args GdcSparkApplicationArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args GdcSparkApplicationArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args GdcSparkApplicationArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var gdcSparkApplicationResource = new Gcp.Dataproc.GdcSparkApplication("gdcSparkApplicationResource", new()
{
    Location = "string",
    SparkApplicationId = "string",
    Serviceinstance = "string",
    Labels = 
    {
        { "string", "string" },
    },
    Annotations = 
    {
        { "string", "string" },
    },
    DisplayName = "string",
    Namespace = "string",
    Project = "string",
    Properties = 
    {
        { "string", "string" },
    },
    PysparkApplicationConfig = new Gcp.Dataproc.Inputs.GdcSparkApplicationPysparkApplicationConfigArgs
    {
        MainPythonFileUri = "string",
        ArchiveUris = new[]
        {
            "string",
        },
        Args = new[]
        {
            "string",
        },
        FileUris = new[]
        {
            "string",
        },
        JarFileUris = new[]
        {
            "string",
        },
        PythonFileUris = new[]
        {
            "string",
        },
    },
    DependencyImages = new[]
    {
        "string",
    },
    SparkApplicationConfig = new Gcp.Dataproc.Inputs.GdcSparkApplicationSparkApplicationConfigArgs
    {
        ArchiveUris = new[]
        {
            "string",
        },
        Args = new[]
        {
            "string",
        },
        FileUris = new[]
        {
            "string",
        },
        JarFileUris = new[]
        {
            "string",
        },
        MainClass = "string",
        MainJarFileUri = "string",
    },
    ApplicationEnvironment = "string",
    SparkRApplicationConfig = new Gcp.Dataproc.Inputs.GdcSparkApplicationSparkRApplicationConfigArgs
    {
        MainRFileUri = "string",
        ArchiveUris = new[]
        {
            "string",
        },
        Args = new[]
        {
            "string",
        },
        FileUris = new[]
        {
            "string",
        },
    },
    SparkSqlApplicationConfig = new Gcp.Dataproc.Inputs.GdcSparkApplicationSparkSqlApplicationConfigArgs
    {
        JarFileUris = new[]
        {
            "string",
        },
        QueryFileUri = "string",
        QueryList = new Gcp.Dataproc.Inputs.GdcSparkApplicationSparkSqlApplicationConfigQueryListArgs
        {
            Queries = new[]
            {
                "string",
            },
        },
        ScriptVariables = 
        {
            { "string", "string" },
        },
    },
    Version = "string",
});
example, err := dataproc.NewGdcSparkApplication(ctx, "gdcSparkApplicationResource", &dataproc.GdcSparkApplicationArgs{
	Location:           pulumi.String("string"),
	SparkApplicationId: pulumi.String("string"),
	Serviceinstance:    pulumi.String("string"),
	Labels: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	Annotations: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	DisplayName: pulumi.String("string"),
	Namespace:   pulumi.String("string"),
	Project:     pulumi.String("string"),
	Properties: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	PysparkApplicationConfig: &dataproc.GdcSparkApplicationPysparkApplicationConfigArgs{
		MainPythonFileUri: pulumi.String("string"),
		ArchiveUris: pulumi.StringArray{
			pulumi.String("string"),
		},
		Args: pulumi.StringArray{
			pulumi.String("string"),
		},
		FileUris: pulumi.StringArray{
			pulumi.String("string"),
		},
		JarFileUris: pulumi.StringArray{
			pulumi.String("string"),
		},
		PythonFileUris: pulumi.StringArray{
			pulumi.String("string"),
		},
	},
	DependencyImages: pulumi.StringArray{
		pulumi.String("string"),
	},
	SparkApplicationConfig: &dataproc.GdcSparkApplicationSparkApplicationConfigArgs{
		ArchiveUris: pulumi.StringArray{
			pulumi.String("string"),
		},
		Args: pulumi.StringArray{
			pulumi.String("string"),
		},
		FileUris: pulumi.StringArray{
			pulumi.String("string"),
		},
		JarFileUris: pulumi.StringArray{
			pulumi.String("string"),
		},
		MainClass:      pulumi.String("string"),
		MainJarFileUri: pulumi.String("string"),
	},
	ApplicationEnvironment: pulumi.String("string"),
	SparkRApplicationConfig: &dataproc.GdcSparkApplicationSparkRApplicationConfigArgs{
		MainRFileUri: pulumi.String("string"),
		ArchiveUris: pulumi.StringArray{
			pulumi.String("string"),
		},
		Args: pulumi.StringArray{
			pulumi.String("string"),
		},
		FileUris: pulumi.StringArray{
			pulumi.String("string"),
		},
	},
	SparkSqlApplicationConfig: &dataproc.GdcSparkApplicationSparkSqlApplicationConfigArgs{
		JarFileUris: pulumi.StringArray{
			pulumi.String("string"),
		},
		QueryFileUri: pulumi.String("string"),
		QueryList: &dataproc.GdcSparkApplicationSparkSqlApplicationConfigQueryListArgs{
			Queries: pulumi.StringArray{
				pulumi.String("string"),
			},
		},
		ScriptVariables: pulumi.StringMap{
			"string": pulumi.String("string"),
		},
	},
	Version: pulumi.String("string"),
})
var gdcSparkApplicationResource = new GdcSparkApplication("gdcSparkApplicationResource", GdcSparkApplicationArgs.builder()
    .location("string")
    .sparkApplicationId("string")
    .serviceinstance("string")
    .labels(Map.of("string", "string"))
    .annotations(Map.of("string", "string"))
    .displayName("string")
    .namespace("string")
    .project("string")
    .properties(Map.of("string", "string"))
    .pysparkApplicationConfig(GdcSparkApplicationPysparkApplicationConfigArgs.builder()
        .mainPythonFileUri("string")
        .archiveUris("string")
        .args("string")
        .fileUris("string")
        .jarFileUris("string")
        .pythonFileUris("string")
        .build())
    .dependencyImages("string")
    .sparkApplicationConfig(GdcSparkApplicationSparkApplicationConfigArgs.builder()
        .archiveUris("string")
        .args("string")
        .fileUris("string")
        .jarFileUris("string")
        .mainClass("string")
        .mainJarFileUri("string")
        .build())
    .applicationEnvironment("string")
    .sparkRApplicationConfig(GdcSparkApplicationSparkRApplicationConfigArgs.builder()
        .mainRFileUri("string")
        .archiveUris("string")
        .args("string")
        .fileUris("string")
        .build())
    .sparkSqlApplicationConfig(GdcSparkApplicationSparkSqlApplicationConfigArgs.builder()
        .jarFileUris("string")
        .queryFileUri("string")
        .queryList(GdcSparkApplicationSparkSqlApplicationConfigQueryListArgs.builder()
            .queries("string")
            .build())
        .scriptVariables(Map.of("string", "string"))
        .build())
    .version("string")
    .build());
gdc_spark_application_resource = gcp.dataproc.GdcSparkApplication("gdcSparkApplicationResource",
    location="string",
    spark_application_id="string",
    serviceinstance="string",
    labels={
        "string": "string",
    },
    annotations={
        "string": "string",
    },
    display_name="string",
    namespace="string",
    project="string",
    properties={
        "string": "string",
    },
    pyspark_application_config={
        "main_python_file_uri": "string",
        "archive_uris": ["string"],
        "args": ["string"],
        "file_uris": ["string"],
        "jar_file_uris": ["string"],
        "python_file_uris": ["string"],
    },
    dependency_images=["string"],
    spark_application_config={
        "archive_uris": ["string"],
        "args": ["string"],
        "file_uris": ["string"],
        "jar_file_uris": ["string"],
        "main_class": "string",
        "main_jar_file_uri": "string",
    },
    application_environment="string",
    spark_r_application_config={
        "main_r_file_uri": "string",
        "archive_uris": ["string"],
        "args": ["string"],
        "file_uris": ["string"],
    },
    spark_sql_application_config={
        "jar_file_uris": ["string"],
        "query_file_uri": "string",
        "query_list": {
            "queries": ["string"],
        },
        "script_variables": {
            "string": "string",
        },
    },
    version="string")
const gdcSparkApplicationResource = new gcp.dataproc.GdcSparkApplication("gdcSparkApplicationResource", {
    location: "string",
    sparkApplicationId: "string",
    serviceinstance: "string",
    labels: {
        string: "string",
    },
    annotations: {
        string: "string",
    },
    displayName: "string",
    namespace: "string",
    project: "string",
    properties: {
        string: "string",
    },
    pysparkApplicationConfig: {
        mainPythonFileUri: "string",
        archiveUris: ["string"],
        args: ["string"],
        fileUris: ["string"],
        jarFileUris: ["string"],
        pythonFileUris: ["string"],
    },
    dependencyImages: ["string"],
    sparkApplicationConfig: {
        archiveUris: ["string"],
        args: ["string"],
        fileUris: ["string"],
        jarFileUris: ["string"],
        mainClass: "string",
        mainJarFileUri: "string",
    },
    applicationEnvironment: "string",
    sparkRApplicationConfig: {
        mainRFileUri: "string",
        archiveUris: ["string"],
        args: ["string"],
        fileUris: ["string"],
    },
    sparkSqlApplicationConfig: {
        jarFileUris: ["string"],
        queryFileUri: "string",
        queryList: {
            queries: ["string"],
        },
        scriptVariables: {
            string: "string",
        },
    },
    version: "string",
});
type: gcp:dataproc:GdcSparkApplication
properties:
    annotations:
        string: string
    applicationEnvironment: string
    dependencyImages:
        - string
    displayName: string
    labels:
        string: string
    location: string
    namespace: string
    project: string
    properties:
        string: string
    pysparkApplicationConfig:
        archiveUris:
            - string
        args:
            - string
        fileUris:
            - string
        jarFileUris:
            - string
        mainPythonFileUri: string
        pythonFileUris:
            - string
    serviceinstance: string
    sparkApplicationConfig:
        archiveUris:
            - string
        args:
            - string
        fileUris:
            - string
        jarFileUris:
            - string
        mainClass: string
        mainJarFileUri: string
    sparkApplicationId: string
    sparkRApplicationConfig:
        archiveUris:
            - string
        args:
            - string
        fileUris:
            - string
        mainRFileUri: string
    sparkSqlApplicationConfig:
        jarFileUris:
            - string
        queryFileUri: string
        queryList:
            queries:
                - string
        scriptVariables:
            string: string
    version: string
GdcSparkApplication Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
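For example, the sparkApplicationConfig input can be written either way; the two forms below are equivalent, and the service instance name is a placeholder:
import pulumi_gcp as gcp

# Dictionary-literal form:
app_a = gcp.dataproc.GdcSparkApplication("app-a",
    spark_application_id="app-a",
    serviceinstance="some-service-instance",  # placeholder
    location="us-west2",
    spark_application_config={
        "main_class": "org.apache.spark.examples.SparkPi",
        "jar_file_uris": ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
    })

# Equivalent argument-class form:
app_b = gcp.dataproc.GdcSparkApplication("app-b",
    spark_application_id="app-b",
    serviceinstance="some-service-instance",  # placeholder
    location="us-west2",
    spark_application_config=gcp.dataproc.GdcSparkApplicationSparkApplicationConfigArgs(
        main_class="org.apache.spark.examples.SparkPi",
        jar_file_uris=["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
    ))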
The GdcSparkApplication resource accepts the following input properties:
- Location string
- The location of the spark application.
- Serviceinstance string
- The id of the service instance to which this spark application belongs.
- SparkApplicationId string
- The id of the application.
- Annotations Dictionary<string, string>
- The annotations to associate with this application. Annotations may be used to store client information, but are not used by the server.
Note: This field is non-authoritative, and will only manage the annotations present in your configuration.
Please refer to the field effective_annotations for all of the annotations present on the resource.
- ApplicationEnvironment string
- An ApplicationEnvironment from which to inherit configuration properties.
- DependencyImages List<string>
- List of container image uris for additional file dependencies. Dependent files are sequentially copied from each image. If a file with the same name exists in 2 images then the file from later image is used.
- DisplayName string
- User-provided human-readable name to be used in user interfaces.
- Labels Dictionary<string, string>
- The labels to associate with this application. Labels may be used for filtering and billing tracking.
Note: This field is non-authoritative, and will only manage the labels present in your configuration.
Please refer to the field effective_labels for all of the labels present on the resource.
- Namespace string
- The Kubernetes namespace in which to create the application. This namespace must already exist on the cluster.
- Project string
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- Properties Dictionary<string, string>
- Application-specific properties.
- PysparkApplicationConfig GdcSparkApplicationPysparkApplicationConfig
- Represents the PySparkApplicationConfig. Structure is documented below.
- SparkApplicationConfig GdcSparkApplicationSparkApplicationConfig
- Represents the SparkApplicationConfig. Structure is documented below.
- SparkRApplicationConfig GdcSparkApplicationSparkRApplicationConfig
- Represents the SparkRApplicationConfig. Structure is documented below.
- SparkSqlApplicationConfig GdcSparkApplicationSparkSqlApplicationConfig
- Represents the SparkSqlApplicationConfig. Structure is documented below.
- Version string
- The Dataproc version of this application.
- Location string
- The location of the spark application.
- Serviceinstance string
- The id of the service instance to which this spark application belongs.
- SparkApplicationId string
- The id of the application.
- Annotations map[string]string
- The annotations to associate with this application. Annotations may be used to store client information, but are not used by the server.
Note: This field is non-authoritative, and will only manage the annotations present in your configuration.
Please refer to the field effective_annotations for all of the annotations present on the resource.
- ApplicationEnvironment string
- An ApplicationEnvironment from which to inherit configuration properties.
- DependencyImages []string
- List of container image uris for additional file dependencies. Dependent files are sequentially copied from each image. If a file with the same name exists in 2 images then the file from later image is used.
- DisplayName string
- User-provided human-readable name to be used in user interfaces.
- Labels map[string]string
- The labels to associate with this application. Labels may be used for filtering and billing tracking.
Note: This field is non-authoritative, and will only manage the labels present in your configuration.
Please refer to the field effective_labels for all of the labels present on the resource.
- Namespace string
- The Kubernetes namespace in which to create the application. This namespace must already exist on the cluster.
- Project string
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- Properties map[string]string
- Application-specific properties.
- PysparkApplicationConfig GdcSparkApplicationPysparkApplicationConfigArgs
- Represents the PySparkApplicationConfig. Structure is documented below.
- SparkApplicationConfig GdcSparkApplicationSparkApplicationConfigArgs
- Represents the SparkApplicationConfig. Structure is documented below.
- SparkRApplicationConfig GdcSparkApplicationSparkRApplicationConfigArgs
- Represents the SparkRApplicationConfig. Structure is documented below.
- SparkSqlApplicationConfig GdcSparkApplicationSparkSqlApplicationConfigArgs
- Represents the SparkSqlApplicationConfig. Structure is documented below.
- Version string
- The Dataproc version of this application.
- location String
- The location of the Spark application.
- serviceinstance String
- The ID of the service instance to which this Spark application belongs.
- sparkApplicationId String
- The ID of the application.
- annotations Map<String,String>
- The annotations to associate with this application. Annotations may be used to store client information, but are not used by the server.
Note: This field is non-authoritative, and will only manage the annotations present in your configuration.
Please refer to the field effective_annotations for all of the annotations present on the resource.
- applicationEnvironment String
- An ApplicationEnvironment from which to inherit configuration properties.
- dependencyImages List<String>
- List of container image uris for additional file dependencies. Dependent files are sequentially copied from each image. If a file with the same name exists in 2 images then the file from later image is used.
- displayName String
- User-provided human-readable name to be used in user interfaces.
- labels Map<String,String>
- The labels to associate with this application. Labels may be used for filtering and billing tracking.
Note: This field is non-authoritative, and will only manage the labels present in your configuration.
Please refer to the field effective_labels for all of the labels present on the resource.
- namespace String
- The Kubernetes namespace in which to create the application. This namespace must already exist on the cluster.
- project String
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- properties Map<String,String>
- Application-specific properties.
- pysparkApplicationConfig GdcSparkApplicationPysparkApplicationConfig
- Represents the PySparkApplicationConfig. Structure is documented below.
- sparkApplicationConfig GdcSparkApplicationSparkApplicationConfig
- Represents the SparkApplicationConfig. Structure is documented below.
- sparkRApplicationConfig GdcSparkApplicationSparkRApplicationConfig
- Represents the SparkRApplicationConfig. Structure is documented below.
- sparkSqlApplicationConfig GdcSparkApplicationSparkSqlApplicationConfig
- Represents the SparkSqlApplicationConfig. Structure is documented below.
- version String
- The Dataproc version of this application.
- location string
- The location of the Spark application.
- serviceinstance string
- The ID of the service instance to which this Spark application belongs.
- sparkApplicationId string
- The ID of the application.
- annotations {[key: string]: string}
- The annotations to associate with this application. Annotations may be used to store client information, but are not used by the server.
Note: This field is non-authoritative, and will only manage the annotations present in your configuration.
Please refer to the field effective_annotations for all of the annotations present on the resource.
- applicationEnvironment string
- An ApplicationEnvironment from which to inherit configuration properties.
- dependencyImages string[]
- List of container image uris for additional file dependencies. Dependent files are sequentially copied from each image. If a file with the same name exists in 2 images then the file from later image is used.
- displayName string
- User-provided human-readable name to be used in user interfaces.
- labels {[key: string]: string}
- The labels to associate with this application. Labels may be used for filtering and billing tracking.
Note: This field is non-authoritative, and will only manage the labels present in your configuration.
Please refer to the field effective_labels for all of the labels present on the resource.
- namespace string
- The Kubernetes namespace in which to create the application. This namespace must already exist on the cluster.
- project string
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- properties {[key: string]: string}
- Application-specific properties.
- pysparkApplicationConfig GdcSparkApplicationPysparkApplicationConfig
- Represents the PySparkApplicationConfig. Structure is documented below.
- sparkApplicationConfig GdcSparkApplicationSparkApplicationConfig
- Represents the SparkApplicationConfig. Structure is documented below.
- sparkRApplicationConfig GdcSparkApplicationSparkRApplicationConfig
- Represents the SparkRApplicationConfig. Structure is documented below.
- sparkSqlApplicationConfig GdcSparkApplicationSparkSqlApplicationConfig
- Represents the SparkSqlApplicationConfig. Structure is documented below.
- version string
- The Dataproc version of this application.
- location str
- The location of the Spark application.
- serviceinstance str
- The ID of the service instance to which this Spark application belongs.
- spark_application_id str
- The ID of the application.
- annotations Mapping[str, str]
- The annotations to associate with this application. Annotations may be used to store client information, but are not used by the server.
Note: This field is non-authoritative, and will only manage the annotations present in your configuration.
Please refer to the field effective_annotations for all of the annotations present on the resource.
- application_environment str
- An ApplicationEnvironment from which to inherit configuration properties.
- dependency_images Sequence[str]
- List of container image uris for additional file dependencies. Dependent files are sequentially copied from each image. If a file with the same name exists in 2 images then the file from later image is used.
- display_name str
- User-provided human-readable name to be used in user interfaces.
- labels Mapping[str, str]
- The labels to associate with this application. Labels may be used for filtering and billing tracking.
Note: This field is non-authoritative, and will only manage the labels present in your configuration.
Please refer to the field effective_labels for all of the labels present on the resource.
- namespace str
- The Kubernetes namespace in which to create the application. This namespace must already exist on the cluster.
- project str
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- properties Mapping[str, str]
- Application-specific properties.
- pyspark_application_config GdcSparkApplicationPysparkApplicationConfigArgs
- Represents the PySparkApplicationConfig. Structure is documented below.
- spark_application_config GdcSparkApplicationSparkApplicationConfigArgs
- Represents the SparkApplicationConfig. Structure is documented below.
- spark_r_application_config GdcSparkApplicationSparkRApplicationConfigArgs
- Represents the SparkRApplicationConfig. Structure is documented below.
- spark_sql_application_config GdcSparkApplicationSparkSqlApplicationConfigArgs
- Represents the SparkSqlApplicationConfig. Structure is documented below.
- version str
- The Dataproc version of this application.
- location String
- The location of the Spark application.
- serviceinstance String
- The ID of the service instance to which this Spark application belongs.
- sparkApplicationId String
- The ID of the application.
- annotations Map<String>
- The annotations to associate with this application. Annotations may be used to store client information, but are not used by the server.
Note: This field is non-authoritative, and will only manage the annotations present in your configuration.
Please refer to the field effective_annotations for all of the annotations present on the resource.
- applicationEnvironment String
- An ApplicationEnvironment from which to inherit configuration properties.
- dependencyImages List<String>
- List of container image uris for additional file dependencies. Dependent files are sequentially copied from each image. If a file with the same name exists in 2 images then the file from later image is used.
- displayName String
- User-provided human-readable name to be used in user interfaces.
- labels Map<String>
- The labels to associate with this application. Labels may be used for filtering and billing tracking.
Note: This field is non-authoritative, and will only manage the labels present in your configuration.
Please refer to the field effective_labels for all of the labels present on the resource.
- namespace String
- The Kubernetes namespace in which to create the application. This namespace must already exist on the cluster.
- project String
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- properties Map<String>
- Application-specific properties.
- pysparkApplicationConfig Property Map
- Represents the PySparkApplicationConfig. Structure is documented below.
- sparkApplicationConfig Property Map
- Represents the SparkApplicationConfig. Structure is documented below.
- sparkRApplicationConfig Property Map
- Represents the SparkRApplicationConfig. Structure is documented below.
- sparkSqlApplicationConfig Property Map
- Represents the SparkSqlApplicationConfig. Structure is documented below.
- version String
- The Dataproc version of this application.
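To show how the metadata inputs above fit together, here is a minimal TypeScript sketch that sets displayName, labels, annotations, and properties alongside a Spark workload. The project, service instance, jar URI, and property values are placeholders, not defaults:
import * as gcp from "@pulumi/gcp";
// Placeholder values throughout: swap in your own project, service instance, and jar.
const labeledApp = new gcp.dataproc.GdcSparkApplication("labeled-app", {
    sparkApplicationId: "example-labeled-app",
    serviceinstance: "my-service-instance", // must reference an existing service instance
    project: "my-project",
    location: "us-west2",
    namespace: "default", // must already exist on the cluster
    displayName: "Example labeled application",
    labels: { team: "data-eng" }, // used for filtering and billing tracking
    annotations: { owner: "data-eng" }, // client-side metadata; not used by the server
    properties: { "spark.executor.instances": "2" }, // application-specific Spark properties
    sparkApplicationConfig: {
        mainClass: "org.apache.spark.examples.SparkPi",
        jarFileUris: ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
    },
});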
Outputs
All input properties are implicitly available as output properties. Additionally, the GdcSparkApplication resource produces the following output properties:
- CreateTime string
- The timestamp when the resource was created.
- EffectiveAnnotations Dictionary<string, string>
- EffectiveLabels Dictionary<string, string>
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
- Id string
- The provider-assigned unique ID for this managed resource.
- MonitoringEndpoint string
- URL for a monitoring UI for this application (for eventual Spark PHS/UI support). Out of scope for private GA.
- Name string
- Identifier. The name of the application. Format: projects/{project}/locations/{location}/serviceInstances/{service_instance}/sparkApplications/{application}
- OutputUri string
- An HCFS URI pointing to the location of the stdout and stderr of the application. Mainly useful for Pantheon and gcloud. Not in scope for private GA.
- PulumiLabels Dictionary<string, string>
- The combination of labels configured directly on the resource and default labels configured on the provider.
- Reconciling bool
- Whether the application is currently reconciling. True if the current state of the resource does not match the intended state, and the system is working to reconcile them, whether or not the change was user initiated.
- State string
- The current state.
Possible values:
- STATE_UNSPECIFIED
- PENDING
- RUNNING
- CANCELLING
- CANCELLED
- SUCCEEDED
- FAILED
- StateMessage string
- A message explaining the current state.
- Uid string
- System generated unique identifier for this application, formatted as UUID4.
- UpdateTime string
- The timestamp when the resource was most recently updated.
- CreateTime string
- The timestamp when the resource was created.
- EffectiveAnnotations map[string]string
- EffectiveLabels map[string]string
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
- Id string
- The provider-assigned unique ID for this managed resource.
- MonitoringEndpoint string
- URL for a monitoring UI for this application (for eventual Spark PHS/UI support). Out of scope for private GA.
- Name string
- Identifier. The name of the application. Format: projects/{project}/locations/{location}/serviceInstances/{service_instance}/sparkApplications/{application}
- OutputUri string
- An HCFS URI pointing to the location of the stdout and stderr of the application. Mainly useful for Pantheon and gcloud. Not in scope for private GA.
- PulumiLabels map[string]string
- The combination of labels configured directly on the resource and default labels configured on the provider.
- Reconciling bool
- Whether the application is currently reconciling. True if the current state of the resource does not match the intended state, and the system is working to reconcile them, whether or not the change was user initiated.
- State string
- The current state.
Possible values:
- STATE_UNSPECIFIED
- PENDING
- RUNNING
- CANCELLING
- CANCELLED
- SUCCEEDED
- FAILED
- StateMessage string
- A message explaining the current state.
- Uid string
- System generated unique identifier for this application, formatted as UUID4.
- UpdateTime string
- The timestamp when the resource was most recently updated.
- createTime String
- The timestamp when the resource was created.
- effectiveAnnotations Map<String,String>
- effectiveLabels Map<String,String>
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
- id String
- The provider-assigned unique ID for this managed resource.
- monitoringEndpoint String
- URL for a monitoring UI for this application (for eventual Spark PHS/UI support). Out of scope for private GA.
- name String
- Identifier. The name of the application. Format: projects/{project}/locations/{location}/serviceInstances/{service_instance}/sparkApplications/{application}
- outputUri String
- An HCFS URI pointing to the location of the stdout and stderr of the application. Mainly useful for Pantheon and gcloud. Not in scope for private GA.
- pulumiLabels Map<String,String>
- The combination of labels configured directly on the resource and default labels configured on the provider.
- reconciling Boolean
- Whether the application is currently reconciling. True if the current state of the resource does not match the intended state, and the system is working to reconcile them, whether or not the change was user initiated.
- state String
- The current state.
Possible values:
- STATE_UNSPECIFIED
- PENDING
- RUNNING
- CANCELLING
- CANCELLED
- SUCCEEDED
- FAILED
- stateMessage String
- A message explaining the current state.
- uid String
- System generated unique identifier for this application, formatted as UUID4.
- updateTime String
- The timestamp when the resource was most recently updated.
- createTime string
- The timestamp when the resource was created.
- effectiveAnnotations {[key: string]: string}
- effectiveLabels {[key: string]: string}
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
- id string
- The provider-assigned unique ID for this managed resource.
- monitoringEndpoint string
- URL for a monitoring UI for this application (for eventual Spark PHS/UI support). Out of scope for private GA.
- name string
- Identifier. The name of the application. Format: projects/{project}/locations/{location}/serviceInstances/{service_instance}/sparkApplications/{application}
- outputUri string
- An HCFS URI pointing to the location of the stdout and stderr of the application. Mainly useful for Pantheon and gcloud. Not in scope for private GA.
- pulumiLabels {[key: string]: string}
- The combination of labels configured directly on the resource and default labels configured on the provider.
- reconciling boolean
- Whether the application is currently reconciling. True if the current state of the resource does not match the intended state, and the system is working to reconcile them, whether or not the change was user initiated.
- state string
- The current state.
Possible values:
- STATE_UNSPECIFIED
- PENDING
- RUNNING
- CANCELLING
- CANCELLED
- SUCCEEDED
- FAILED
- stateMessage string
- A message explaining the current state.
- uid string
- System generated unique identifier for this application, formatted as UUID4.
- updateTime string
- The timestamp when the resource was most recently updated.
- create_time str
- The timestamp when the resource was created.
- effective_annotations Mapping[str, str]
- effective_labels Mapping[str, str]
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
- id str
- The provider-assigned unique ID for this managed resource.
- monitoring_endpoint str
- URL for a monitoring UI for this application (for eventual Spark PHS/UI support). Out of scope for private GA.
- name str
- Identifier. The name of the application. Format: projects/{project}/locations/{location}/serviceInstances/{service_instance}/sparkApplications/{application}
- output_uri str
- An HCFS URI pointing to the location of the stdout and stderr of the application. Mainly useful for Pantheon and gcloud. Not in scope for private GA.
- pulumi_labels Mapping[str, str]
- The combination of labels configured directly on the resource and default labels configured on the provider.
- reconciling bool
- Whether the application is currently reconciling. True if the current state of the resource does not match the intended state, and the system is working to reconcile them, whether or not the change was user initiated.
- state str
- The current state.
Possible values:
- STATE_UNSPECIFIED
- PENDING
- RUNNING
- CANCELLING
- CANCELLED
- SUCCEEDED
- FAILED
- state_message str
- A message explaining the current state.
- uid str
- System generated unique identifier for this application, formatted as UUID4.
- update_time str
- The timestamp when the resource was most recently updated.
- createTime String
- The timestamp when the resource was created.
- effectiveAnnotations Map<String>
- effectiveLabels Map<String>
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
- id String
- The provider-assigned unique ID for this managed resource.
- monitoringEndpoint String
- URL for a monitoring UI for this application (for eventual Spark PHS/UI support). Out of scope for private GA.
- name String
- Identifier. The name of the application. Format: projects/{project}/locations/{location}/serviceInstances/{service_instance}/sparkApplications/{application}
- outputUri String
- An HCFS URI pointing to the location of the stdout and stderr of the application. Mainly useful for Pantheon and gcloud. Not in scope for private GA.
- pulumiLabels Map<String>
- The combination of labels configured directly on the resource and default labels configured on the provider.
- reconciling Boolean
- Whether the application is currently reconciling. True if the current state of the resource does not match the intended state, and the system is working to reconcile them, whether or not the change was user initiated.
- state String
- The current state.
Possible values:
- STATE_UNSPECIFIED
- PENDING
- RUNNING
- CANCELLING
- CANCELLED
- SUCCEEDED
- FAILED
- stateMessage String
- A message explaining the current state.
- uid String
- System generated unique identifier for this application, formatted as UUID4.
- updateTime String
- The timestamp when the resource was most recently updated.
Look up Existing GdcSparkApplication Resource
Get an existing GdcSparkApplication resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: GdcSparkApplicationState, opts?: CustomResourceOptions): GdcSparkApplication
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        annotations: Optional[Mapping[str, str]] = None,
        application_environment: Optional[str] = None,
        create_time: Optional[str] = None,
        dependency_images: Optional[Sequence[str]] = None,
        display_name: Optional[str] = None,
        effective_annotations: Optional[Mapping[str, str]] = None,
        effective_labels: Optional[Mapping[str, str]] = None,
        labels: Optional[Mapping[str, str]] = None,
        location: Optional[str] = None,
        monitoring_endpoint: Optional[str] = None,
        name: Optional[str] = None,
        namespace: Optional[str] = None,
        output_uri: Optional[str] = None,
        project: Optional[str] = None,
        properties: Optional[Mapping[str, str]] = None,
        pulumi_labels: Optional[Mapping[str, str]] = None,
        pyspark_application_config: Optional[GdcSparkApplicationPysparkApplicationConfigArgs] = None,
        reconciling: Optional[bool] = None,
        serviceinstance: Optional[str] = None,
        spark_application_config: Optional[GdcSparkApplicationSparkApplicationConfigArgs] = None,
        spark_application_id: Optional[str] = None,
        spark_r_application_config: Optional[GdcSparkApplicationSparkRApplicationConfigArgs] = None,
        spark_sql_application_config: Optional[GdcSparkApplicationSparkSqlApplicationConfigArgs] = None,
        state: Optional[str] = None,
        state_message: Optional[str] = None,
        uid: Optional[str] = None,
        update_time: Optional[str] = None,
        version: Optional[str] = None) -> GdcSparkApplication
func GetGdcSparkApplication(ctx *Context, name string, id IDInput, state *GdcSparkApplicationState, opts ...ResourceOption) (*GdcSparkApplication, error)
public static GdcSparkApplication Get(string name, Input<string> id, GdcSparkApplicationState? state, CustomResourceOptions? opts = null)
public static GdcSparkApplication get(String name, Output<String> id, GdcSparkApplicationState state, CustomResourceOptions options)
resources:
  _:
    type: gcp:dataproc:GdcSparkApplication
    get:
      id: ${id}
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
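For example, a TypeScript program could rehydrate an existing application from its full resource name (the ID below is a placeholder) and read its state:
import * as gcp from "@pulumi/gcp";
// The lookup ID is the application's full resource name; this one is hypothetical.
const existing = gcp.dataproc.GdcSparkApplication.get(
    "existing-app",
    "projects/my-project/locations/us-west2/serviceInstances/my-service-instance/sparkApplications/my-app");
export const existingState = existing.state;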
- Annotations Dictionary<string, string>
- The annotations to associate with this application. Annotations may be used to store client information, but are not used by the server.
Note: This field is non-authoritative, and will only manage the annotations present in your configuration.
Please refer to the field effective_annotations for all of the annotations present on the resource.
- ApplicationEnvironment string
- An ApplicationEnvironment from which to inherit configuration properties.
- CreateTime string
- The timestamp when the resource was created.
- DependencyImages List<string>
- List of container image uris for additional file dependencies. Dependent files are sequentially copied from each image. If a file with the same name exists in 2 images then the file from later image is used.
- DisplayName string
- User-provided human-readable name to be used in user interfaces.
- EffectiveAnnotations Dictionary<string, string>
- EffectiveLabels Dictionary<string, string>
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
- Labels Dictionary<string, string>
- The labels to associate with this application. Labels may be used for filtering and billing tracking.
Note: This field is non-authoritative, and will only manage the labels present in your configuration.
Please refer to the field effective_labels for all of the labels present on the resource.
- Location string
- The location of the Spark application.
- MonitoringEndpoint string
- URL for a monitoring UI for this application (for eventual Spark PHS/UI support). Out of scope for private GA.
- Name string
- Identifier. The name of the application. Format: projects/{project}/locations/{location}/serviceInstances/{service_instance}/sparkApplications/{application}
- Namespace string
- The Kubernetes namespace in which to create the application. This namespace must already exist on the cluster.
- OutputUri string
- An HCFS URI pointing to the location of the stdout and stderr of the application. Mainly useful for Pantheon and gcloud. Not in scope for private GA.
- Project string
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- Properties Dictionary<string, string>
- Application-specific properties.
- PulumiLabels Dictionary<string, string>
- The combination of labels configured directly on the resource and default labels configured on the provider.
- PysparkApplicationConfig GdcSparkApplicationPysparkApplicationConfig
- Represents the PySparkApplicationConfig. Structure is documented below.
- Reconciling bool
- Whether the application is currently reconciling. True if the current state of the resource does not match the intended state, and the system is working to reconcile them, whether or not the change was user initiated.
- Serviceinstance string
- The ID of the service instance to which this Spark application belongs.
- SparkApplicationConfig GdcSparkApplicationSparkApplicationConfig
- Represents the SparkApplicationConfig. Structure is documented below.
- SparkApplicationId string
- The ID of the application.
- SparkRApplicationConfig GdcSparkApplicationSparkRApplicationConfig
- Represents the SparkRApplicationConfig. Structure is documented below.
- SparkSqlApplicationConfig GdcSparkApplicationSparkSqlApplicationConfig
- Represents the SparkSqlApplicationConfig. Structure is documented below.
- State string
- The current state.
Possible values:
- STATE_UNSPECIFIED
- PENDING
- RUNNING
- CANCELLING
- CANCELLED
- SUCCEEDED
- FAILED
- StateMessage string
- A message explaining the current state.
- Uid string
- System generated unique identifier for this application, formatted as UUID4.
- UpdateTime string
- The timestamp when the resource was most recently updated.
- Version string
- The Dataproc version of this application.
- Annotations map[string]string
- The annotations to associate with this application. Annotations may be used to store client information, but are not used by the server.
Note: This field is non-authoritative, and will only manage the annotations present in your configuration.
Please refer to the field effective_annotations for all of the annotations present on the resource.
- ApplicationEnvironment string
- An ApplicationEnvironment from which to inherit configuration properties.
- CreateTime string
- The timestamp when the resource was created.
- DependencyImages []string
- List of container image uris for additional file dependencies. Dependent files are sequentially copied from each image. If a file with the same name exists in 2 images then the file from later image is used.
- DisplayName string
- User-provided human-readable name to be used in user interfaces.
- EffectiveAnnotations map[string]string
- EffectiveLabels map[string]string
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
- Labels map[string]string
- The labels to associate with this application. Labels may be used for filtering and billing tracking.
Note: This field is non-authoritative, and will only manage the labels present in your configuration.
Please refer to the field effective_labels for all of the labels present on the resource.
- Location string
- The location of the Spark application.
- MonitoringEndpoint string
- URL for a monitoring UI for this application (for eventual Spark PHS/UI support). Out of scope for private GA.
- Name string
- Identifier. The name of the application. Format: projects/{project}/locations/{location}/serviceInstances/{service_instance}/sparkApplications/{application}
- Namespace string
- The Kubernetes namespace in which to create the application. This namespace must already exist on the cluster.
- OutputUri string
- An HCFS URI pointing to the location of the stdout and stderr of the application. Mainly useful for Pantheon and gcloud. Not in scope for private GA.
- Project string
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- Properties map[string]string
- Application-specific properties.
- PulumiLabels map[string]string
- The combination of labels configured directly on the resource and default labels configured on the provider.
- PysparkApplicationConfig GdcSparkApplicationPysparkApplicationConfigArgs
- Represents the PySparkApplicationConfig. Structure is documented below.
- Reconciling bool
- Whether the application is currently reconciling. True if the current state of the resource does not match the intended state, and the system is working to reconcile them, whether or not the change was user initiated.
- Serviceinstance string
- The ID of the service instance to which this Spark application belongs.
- SparkApplicationConfig GdcSparkApplicationSparkApplicationConfigArgs
- Represents the SparkApplicationConfig. Structure is documented below.
- SparkApplicationId string
- The ID of the application.
- SparkRApplicationConfig GdcSparkApplicationSparkRApplicationConfigArgs
- Represents the SparkRApplicationConfig. Structure is documented below.
- SparkSqlApplicationConfig GdcSparkApplicationSparkSqlApplicationConfigArgs
- Represents the SparkSqlApplicationConfig. Structure is documented below.
- State string
- The current state.
Possible values:
- STATE_UNSPECIFIED
- PENDING
- RUNNING
- CANCELLING
- CANCELLED
- SUCCEEDED
- FAILED
- StateMessage string
- A message explaining the current state.
- Uid string
- System generated unique identifier for this application, formatted as UUID4.
- UpdateTime string
- The timestamp when the resource was most recently updated.
- Version string
- The Dataproc version of this application.
- annotations Map<String,String>
- The annotations to associate with this application. Annotations may be used to store client information, but are not used by the server.
Note: This field is non-authoritative, and will only manage the annotations present in your configuration.
Please refer to the field effective_annotations for all of the annotations present on the resource.
- applicationEnvironment String
- An ApplicationEnvironment from which to inherit configuration properties.
- createTime String
- The timestamp when the resource was created.
- dependencyImages List<String>
- List of container image uris for additional file dependencies. Dependent files are sequentially copied from each image. If a file with the same name exists in 2 images then the file from later image is used.
- displayName String
- User-provided human-readable name to be used in user interfaces.
- effectiveAnnotations Map<String,String>
- effectiveLabels Map<String,String>
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
- labels Map<String,String>
- The labels to associate with this application. Labels may be used for filtering and billing tracking.
Note: This field is non-authoritative, and will only manage the labels present in your configuration.
Please refer to the field effective_labels for all of the labels present on the resource.
- location String
- The location of the Spark application.
- monitoringEndpoint String
- URL for a monitoring UI for this application (for eventual Spark PHS/UI support). Out of scope for private GA.
- name String
- Identifier. The name of the application. Format: projects/{project}/locations/{location}/serviceInstances/{service_instance}/sparkApplications/{application}
- namespace String
- The Kubernetes namespace in which to create the application. This namespace must already exist on the cluster.
- outputUri String
- An HCFS URI pointing to the location of the stdout and stderr of the application. Mainly useful for Pantheon and gcloud. Not in scope for private GA.
- project String
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- properties Map<String,String>
- Application-specific properties.
- pulumiLabels Map<String,String>
- The combination of labels configured directly on the resource and default labels configured on the provider.
- pysparkApplicationConfig GdcSparkApplicationPysparkApplicationConfig
- Represents the PySparkApplicationConfig. Structure is documented below.
- reconciling Boolean
- Whether the application is currently reconciling. True if the current state of the resource does not match the intended state, and the system is working to reconcile them, whether or not the change was user initiated.
- serviceinstance String
- The ID of the service instance to which this Spark application belongs.
- sparkApplicationConfig GdcSparkApplicationSparkApplicationConfig
- Represents the SparkApplicationConfig. Structure is documented below.
- sparkApplicationId String
- The ID of the application.
- sparkRApplicationConfig GdcSparkApplicationSparkRApplicationConfig
- Represents the SparkRApplicationConfig. Structure is documented below.
- sparkSqlApplicationConfig GdcSparkApplicationSparkSqlApplicationConfig
- Represents the SparkSqlApplicationConfig. Structure is documented below.
- state String
- The current state.
Possible values:
- STATE_UNSPECIFIED
- PENDING
- RUNNING
- CANCELLING
- CANCELLED
- SUCCEEDED
- FAILED
- stateMessage String
- A message explaining the current state.
- uid String
- System generated unique identifier for this application, formatted as UUID4.
- updateTime String
- The timestamp when the resource was most recently updated.
- version String
- The Dataproc version of this application.
- annotations {[key: string]: string}
- The annotations to associate with this application. Annotations may be used to store client information, but are not used by the server.
Note: This field is non-authoritative, and will only manage the annotations present in your configuration.
Please refer to the field effective_annotations for all of the annotations present on the resource.
- applicationEnvironment string
- An ApplicationEnvironment from which to inherit configuration properties.
- createTime string
- The timestamp when the resource was created.
- dependencyImages string[]
- List of container image uris for additional file dependencies. Dependent files are sequentially copied from each image. If a file with the same name exists in 2 images then the file from later image is used.
- displayName string
- User-provided human-readable name to be used in user interfaces.
- effectiveAnnotations {[key: string]: string}
- effectiveLabels {[key: string]: string}
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
- labels {[key: string]: string}
- The labels to associate with this application. Labels may be used for filtering and billing tracking.
Note: This field is non-authoritative, and will only manage the labels present in your configuration.
Please refer to the field effective_labels for all of the labels present on the resource.
- location string
- The location of the Spark application.
- monitoringEndpoint string
- URL for a monitoring UI for this application (for eventual Spark PHS/UI support). Out of scope for private GA.
- name string
- Identifier. The name of the application. Format: projects/{project}/locations/{location}/serviceInstances/{service_instance}/sparkApplications/{application}
- namespace string
- The Kubernetes namespace in which to create the application. This namespace must already exist on the cluster.
- outputUri string
- An HCFS URI pointing to the location of the stdout and stderr of the application. Mainly useful for Pantheon and gcloud. Not in scope for private GA.
- project string
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- properties {[key: string]: string}
- Application-specific properties.
- pulumiLabels {[key: string]: string}
- The combination of labels configured directly on the resource and default labels configured on the provider.
- pysparkApplicationConfig GdcSparkApplicationPysparkApplicationConfig
- Represents the PySparkApplicationConfig. Structure is documented below.
- reconciling boolean
- Whether the application is currently reconciling. True if the current state of the resource does not match the intended state, and the system is working to reconcile them, whether or not the change was user initiated.
- serviceinstance string
- The ID of the service instance to which this Spark application belongs.
- sparkApplicationConfig GdcSparkApplicationSparkApplicationConfig
- Represents the SparkApplicationConfig. Structure is documented below.
- sparkApplicationId string
- The ID of the application.
- sparkRApplicationConfig GdcSparkApplicationSparkRApplicationConfig
- Represents the SparkRApplicationConfig. Structure is documented below.
- sparkSqlApplicationConfig GdcSparkApplicationSparkSqlApplicationConfig
- Represents the SparkSqlApplicationConfig. Structure is documented below.
- state string
- The current state.
Possible values:
- STATE_UNSPECIFIED
- PENDING
- RUNNING
- CANCELLING
- CANCELLED
- SUCCEEDED
- FAILED
- stateMessage string
- A message explaining the current state.
- uid string
- System generated unique identifier for this application, formatted as UUID4.
- updateTime string
- The timestamp when the resource was most recently updated.
- version string
- The Dataproc version of this application.
- annotations Mapping[str, str]
- The annotations to associate with this application. Annotations may be used to store client information, but are not used by the server.
Note: This field is non-authoritative, and will only manage the annotations present in your configuration.
Please refer to the field effective_annotations for all of the annotations present on the resource.
- application_environment str
- An ApplicationEnvironment from which to inherit configuration properties.
- create_time str
- The timestamp when the resource was created.
- dependency_images Sequence[str]
- List of container image uris for additional file dependencies. Dependent files are sequentially copied from each image. If a file with the same name exists in 2 images then the file from later image is used.
- display_name str
- User-provided human-readable name to be used in user interfaces.
- effective_annotations Mapping[str, str]
- effective_labels Mapping[str, str]
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
- labels Mapping[str, str]
- The labels to associate with this application. Labels may be used for filtering and billing tracking.
Note: This field is non-authoritative, and will only manage the labels present in your configuration.
Please refer to the field effective_labels for all of the labels present on the resource.
- location str
- The location of the Spark application.
- monitoring_endpoint str
- URL for a monitoring UI for this application (for eventual Spark PHS/UI support). Out of scope for private GA.
- name str
- Identifier. The name of the application. Format: projects/{project}/locations/{location}/serviceInstances/{service_instance}/sparkApplications/{application}
- namespace str
- The Kubernetes namespace in which to create the application. This namespace must already exist on the cluster.
- output_uri str
- An HCFS URI pointing to the location of the stdout and stderr of the application. Mainly useful for Pantheon and gcloud. Not in scope for private GA.
- project str
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- properties Mapping[str, str]
- Application-specific properties.
- pulumi_labels Mapping[str, str]
- The combination of labels configured directly on the resource and default labels configured on the provider.
- pyspark_application_config GdcSparkApplicationPysparkApplicationConfigArgs
- Represents the PySparkApplicationConfig. Structure is documented below.
- reconciling bool
- Whether the application is currently reconciling. True if the current state of the resource does not match the intended state, and the system is working to reconcile them, whether or not the change was user initiated.
- serviceinstance str
- The ID of the service instance to which this Spark application belongs.
- spark_application_config GdcSparkApplicationSparkApplicationConfigArgs
- Represents the SparkApplicationConfig. Structure is documented below.
- spark_application_id str
- The ID of the application.
- spark_r_application_config GdcSparkApplicationSparkRApplicationConfigArgs
- Represents the SparkRApplicationConfig. Structure is documented below.
- spark_sql_application_config GdcSparkApplicationSparkSqlApplicationConfigArgs
- Represents the SparkSqlApplicationConfig. Structure is documented below.
- state str
- The current state.
Possible values:
- STATE_UNSPECIFIED
- PENDING
- RUNNING
- CANCELLING
- CANCELLED
- SUCCEEDED
- FAILED
- state_message str
- A message explaining the current state.
- uid str
- System generated unique identifier for this application, formatted as UUID4.
- update_time str
- The timestamp when the resource was most recently updated.
- version str
- The Dataproc version of this application.
- annotations Map<String>
- The annotations to associate with this application. Annotations may be used to store client information, but are not used by the server.
Note: This field is non-authoritative, and will only manage the annotations present in your configuration.
Please refer to the field effective_annotations for all of the annotations present on the resource.
- applicationEnvironment String
- An ApplicationEnvironment from which to inherit configuration properties.
- createTime String
- The timestamp when the resource was created.
- dependencyImages List<String>
- List of container image uris for additional file dependencies. Dependent files are sequentially copied from each image. If a file with the same name exists in 2 images then the file from later image is used.
- displayName String
- User-provided human-readable name to be used in user interfaces.
- effectiveAnnotations Map<String>
- effectiveLabels Map<String>
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients, and services.
- labels Map<String>
- The labels to associate with this application. Labels may be used for filtering and billing tracking.
Note: This field is non-authoritative, and will only manage the labels present in your configuration.
Please refer to the field effective_labels for all of the labels present on the resource.
- location String
- The location of the Spark application.
- monitoringEndpoint String
- URL for a monitoring UI for this application (for eventual Spark PHS/UI support). Out of scope for private GA.
- name String
- Identifier. The name of the application. Format: projects/{project}/locations/{location}/serviceInstances/{service_instance}/sparkApplications/{application}
- namespace String
- The Kubernetes namespace in which to create the application. This namespace must already exist on the cluster.
- outputUri String
- An HCFS URI pointing to the location of the stdout and stderr of the application. Mainly useful for Pantheon and gcloud. Not in scope for private GA.
- project String
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- properties Map<String>
- Application-specific properties.
- pulumiLabels Map<String>
- The combination of labels configured directly on the resource and default labels configured on the provider.
- pysparkApplicationConfig Property Map
- Represents the PySparkApplicationConfig. Structure is documented below.
- reconciling Boolean
- Whether the application is currently reconciling. True if the current state of the resource does not match the intended state, and the system is working to reconcile them, whether or not the change was user initiated.
- serviceinstance String
- The ID of the service instance to which this Spark application belongs.
- sparkApplicationConfig Property Map
- Represents the SparkApplicationConfig. Structure is documented below.
- sparkApplicationId String
- The ID of the application.
- sparkRApplicationConfig Property Map
- Represents the SparkRApplicationConfig. Structure is documented below.
- sparkSqlApplicationConfig Property Map
- Represents the SparkSqlApplicationConfig. Structure is documented below.
- state String
- The current state.
Possible values:
- STATE_UNSPECIFIED
- PENDING
- RUNNING
- CANCELLING
- CANCELLED
- SUCCEEDED
- FAILED
- stateMessage String
- A message explaining the current state.
- uid String
- System generated unique identifier for this application, formatted as UUID4.
- updateTime String
- The timestamp when the resource was most recently updated.
- version String
- The Dataproc version of this application.
Supporting Types
GdcSparkApplicationPysparkApplicationConfig, GdcSparkApplicationPysparkApplicationConfigArgs            
- MainPythonFileUri string
- The HCFS URI of the main Python file to use as the driver. Must be a .py file.
- ArchiveUris List<string>
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- Args List<string>
- The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
- FileUris List<string>
- HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
- JarFileUris List<string>
- HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
- PythonFileUris List<string>
- HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
- MainPythonFileUri string
- The HCFS URI of the main Python file to use as the driver. Must be a .py file.
- ArchiveUris []string
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- Args []string
- The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
- FileUris []string
- HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
- JarFileUris []string
- HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
- PythonFileUris []string
- HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
- mainPythonFileUri String
- The HCFS URI of the main Python file to use as the driver. Must be a .py file.
- archiveUris List<String>
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- args List<String>
- The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
- fileUris List<String>
- HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
- jarFileUris List<String>
- HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
- pythonFileUris List<String>
- HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
- mainPythonFileUri string
- The HCFS URI of the main Python file to use as the driver. Must be a .py file.
- archiveUris string[]
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- args string[]
- The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
- fileUris string[]
- HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
- jarFileUris string[]
- HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
- pythonFileUris string[]
- HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
- main_python_file_uri str
- The HCFS URI of the main Python file to use as the driver. Must be a .py file.
- archive_uris Sequence[str]
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- args Sequence[str]
- The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
- file_uris Sequence[str]
- HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
- jar_file_uris Sequence[str]
- HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
- python_file_uris Sequence[str]
- HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
- mainPythonFileUri String
- The HCFS URI of the main Python file to use as the driver. Must be a .py file.
- archiveUris List<String>
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- args List<String>
- The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
- fileUris List<String>
- HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
- jarFileUris List<String>
- HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
- pythonFileUris List<String>
- HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
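As a sketch of how these fields compose, the following TypeScript example submits a hypothetical PySpark driver with a helper module and a data file; every URI and name below is a placeholder:
import * as gcp from "@pulumi/gcp";
// Hypothetical application: all URIs and the service instance are placeholders.
const pysparkApp = new gcp.dataproc.GdcSparkApplication("pyspark-app", {
    sparkApplicationId: "example-pyspark-app",
    serviceinstance: "my-service-instance",
    location: "us-west2",
    pysparkApplicationConfig: {
        mainPythonFileUri: "file:///opt/app/main.py", // driver; must be a .py file
        pythonFileUris: ["file:///opt/app/helpers.py"], // extra modules for the PySpark framework
        fileUris: ["file:///opt/app/config.json"], // staged into each executor's working directory
        args: ["--mode", "batch"], // avoid flags like --conf; set those via properties instead
    },
});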
GdcSparkApplicationSparkApplicationConfig, GdcSparkApplicationSparkApplicationConfigArgs            
- ArchiveUris List<string>
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- Args List<string>
- The arguments to pass to the driver. Do not include arguments that can be set as application properties, such as --conf, since a collision can occur that causes an incorrect application submission.
- FileUris List<string>
- HCFS URIs of files to be placed in the working directory of each executor.
- JarFileUris List<string>
- HCFS URIs of jar files to add to the classpath of the Spark driver and tasks.
- MainClass string
- The name of the driver main class. The jar file that contains the class must be in the classpath or specified in jar_file_uris.
- MainJarFileUri string
- The HCFS URI of the jar file that contains the main class.
- ArchiveUris []string
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- Args []string
- The arguments to pass to the driver. Do not include arguments that can be set as application properties, such as --conf, since a collision can occur that causes an incorrect application submission.
- FileUris []string
- HCFS URIs of files to be placed in the working directory of each executor.
- JarFileUris []string
- HCFS URIs of jar files to add to the classpath of the Spark driver and tasks.
- MainClass string
- The name of the driver main class. The jar file that contains the class must be in the classpath or specified in jar_file_uris.
- MainJarFileUri string
- The HCFS URI of the jar file that contains the main class.
- archiveUris List<String>
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- args List<String>
- The arguments to pass to the driver. Do not include arguments that can be set as application properties, such as --conf, since a collision can occur that causes an incorrect application submission.
- fileUris List<String>
- HCFS URIs of files to be placed in the working directory of each executor.
- jarFileUris List<String>
- HCFS URIs of jar files to add to the classpath of the Spark driver and tasks.
- mainClass String
- The name of the driver main class. The jar file that contains the class must be in the classpath or specified in jar_file_uris.
- mainJarFileUri String
- The HCFS URI of the jar file that contains the main class.
- archiveUris string[]
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- args string[]
- The arguments to pass to the driver. Do not include arguments that can be set as application properties, such as --conf, since a collision can occur that causes an incorrect application submission.
- fileUris string[]
- HCFS URIs of files to be placed in the working directory of each executor.
- jarFileUris string[]
- HCFS URIs of jar files to add to the classpath of the Spark driver and tasks.
- mainClass string
- The name of the driver main class. The jar file that contains the class must be in the classpath or specified in jar_file_uris.
- mainJarFileUri string
- The HCFS URI of the jar file that contains the main class.
- archive_uris Sequence[str]
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- args Sequence[str]
- The arguments to pass to the driver. Do not include arguments that can be set as application properties, such as --conf, since a collision can occur that causes an incorrect application submission.
- file_uris Sequence[str]
- HCFS URIs of files to be placed in the working directory of each executor.
- jar_file_uris Sequence[str]
- HCFS URIs of jar files to add to the classpath of the Spark driver and tasks.
- main_class str
- The name of the driver main class. The jar file that contains the class must be in the classpath or specified in jar_file_uris.
- main_jar_file_uri str
- The HCFS URI of the jar file that contains the main class.
- archiveUris List<String>
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar,.tar,.tar.gz,.tgz, and.zip.
- args List<String>
- The arguments to pass to the driver. Do not include arguments that can be set as application properties, such as --conf, since a collision can occur that causes an incorrect application submission.
- fileUris List<String>
- HCFS URIs of files to be placed in the working directory of each executor.
- jarFile List<String>Uris 
- HCFS URIs of jar files to add to the classpath of the Spark driver and tasks.
- mainClass String
- The name of the driver main class. The jar file that contains the class must be in the classpath or specified in jar_file_uris.
- mainJar StringFile Uri 
- The HCFS URI of the jar file that contains the main class.
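For illustration, here is a minimal TypeScript sketch that supplies the entry point through mainJarFileUri instead of mainClass plus jarFileUris. The service instance, project, jar path, and file URIs are placeholders, and the jar is assumed to declare its main class in its manifest:
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
// Sketch only: all names and URIs below are illustrative placeholders.
const sparkJarApp = new gcp.dataproc.GdcSparkApplication("spark-jar-app", {
    sparkApplicationId: "example-spark-jar-app",
    serviceinstance: "example-service-instance",
    project: "my-project",
    location: "us-west2",
    namespace: "default",
    sparkApplicationConfig: {
        // Hypothetical jar whose manifest names the main class, so mainClass is omitted.
        mainJarFileUri: "file:///opt/jobs/my-spark-job.jar",
        // Copied into each executor's working directory.
        fileUris: ["file:///opt/jobs/input.txt"],
        // Extracted into each executor's working directory.
        archiveUris: ["file:///opt/jobs/deps.tar.gz"],
        args: ["10000"],
    },
});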
GdcSparkApplicationSparkRApplicationConfig, GdcSparkApplicationSparkRApplicationConfigArgs            
- MainRFileUri string
- The HCFS URI of the main R file to use as the driver. Must be a .R file.
- ArchiveUris List<string>
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- Args List<string>
- The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
- FileUris List<string>
- HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
- MainRFileUri string
- The HCFS URI of the main R file to use as the driver. Must be a .R file.
- ArchiveUris []string
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- Args []string
- The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
- FileUris []string
- HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
- mainRFileUri String
- The HCFS URI of the main R file to use as the driver. Must be a .R file.
- archiveUris List<String>
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- args List<String>
- The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
- fileUris List<String>
- HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
- mainRFileUri string
- The HCFS URI of the main R file to use as the driver. Must be a .R file.
- archiveUris string[]
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- args string[]
- The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
- fileUris string[]
- HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
- main_r_file_uri str
- The HCFS URI of the main R file to use as the driver. Must be a .R file.
- archive_uris Sequence[str]
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- args Sequence[str]
- The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
- file_uris Sequence[str]
- HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
- mainRFileUri String
- The HCFS URI of the main R file to use as the driver. Must be a .R file.
- archiveUris List<String>
- HCFS URIs of archives to be extracted into the working directory of each executor. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- args List<String>
- The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
- fileUris List<String>
- HCFS URIs of files to be placed in the working directory of each executor. Useful for naively parallel tasks.
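A corresponding TypeScript sketch for a SparkR application follows; sparkRApplicationConfig is the camelCase form of this type on the resource, and the R script path, argument, and data file are hypothetical:
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
// Sketch only: paths and names are placeholders.
const sparkRApp = new gcp.dataproc.GdcSparkApplication("spark-r-app", {
    sparkApplicationId: "example-sparkr-app",
    serviceinstance: "example-service-instance",
    project: "my-project",
    location: "us-west2",
    namespace: "default",
    sparkRApplicationConfig: {
        // Must be a .R file.
        mainRFileUri: "file:///opt/jobs/analysis.R",
        // Arguments passed to the R driver.
        args: ["100"],
        // Placed in each executor's working directory.
        fileUris: ["file:///opt/jobs/data.csv"],
    },
});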
GdcSparkApplicationSparkSqlApplicationConfig, GdcSparkApplicationSparkSqlApplicationConfigArgs              
- JarFileUris List<string>
- HCFS URIs of jar files to be added to the Spark CLASSPATH.
- QueryFileUri string
- The HCFS URI of the script that contains SQL queries.
- QueryList GdcSparkApplicationSparkSqlApplicationConfigQueryList
- Represents a list of queries. Structure is documented below.
- ScriptVariables Dictionary<string, string>
- Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
- JarFileUris []string
- HCFS URIs of jar files to be added to the Spark CLASSPATH.
- QueryFileUri string
- The HCFS URI of the script that contains SQL queries.
- QueryList GdcSparkApplicationSparkSqlApplicationConfigQueryList
- Represents a list of queries. Structure is documented below.
- ScriptVariables map[string]string
- Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
- jarFileUris List<String>
- HCFS URIs of jar files to be added to the Spark CLASSPATH.
- queryFileUri String
- The HCFS URI of the script that contains SQL queries.
- queryList GdcSparkApplicationSparkSqlApplicationConfigQueryList
- Represents a list of queries. Structure is documented below.
- scriptVariables Map<String,String>
- Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
- jarFileUris string[]
- HCFS URIs of jar files to be added to the Spark CLASSPATH.
- queryFileUri string
- The HCFS URI of the script that contains SQL queries.
- queryList GdcSparkApplicationSparkSqlApplicationConfigQueryList
- Represents a list of queries. Structure is documented below.
- scriptVariables {[key: string]: string}
- Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
- jar_file_uris Sequence[str]
- HCFS URIs of jar files to be added to the Spark CLASSPATH.
- query_file_uri str
- The HCFS URI of the script that contains SQL queries.
- query_list GdcSparkApplicationSparkSqlApplicationConfigQueryList
- Represents a list of queries. Structure is documented below.
- script_variables Mapping[str, str]
- Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
- jarFileUris List<String>
- HCFS URIs of jar files to be added to the Spark CLASSPATH.
- queryFileUri String
- The HCFS URI of the script that contains SQL queries.
- queryList Property Map
- Represents a list of queries. Structure is documented below.
- scriptVariables Map<String>
- Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
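As a sketch, a Spark SQL application driven by a script file might look like the following TypeScript; the SQL script URI, UDF jar, and variable name are hypothetical, and each scriptVariables entry behaves like the corresponding SET name="value"; command:
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
// Sketch only: URIs and the variable name are placeholders.
const sparkSqlApp = new gcp.dataproc.GdcSparkApplication("spark-sql-app", {
    sparkApplicationId: "example-spark-sql-app",
    serviceinstance: "example-service-instance",
    project: "my-project",
    location: "us-west2",
    namespace: "default",
    sparkSqlApplicationConfig: {
        // Script containing the SQL queries to run.
        queryFileUri: "file:///opt/jobs/report.sql",
        // Equivalent to: SET data_date="2024-01-01";
        scriptVariables: {
            data_date: "2024-01-01",
        },
        // Added to the Spark CLASSPATH.
        jarFileUris: ["file:///opt/jobs/udfs.jar"],
    },
});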
GdcSparkApplicationSparkSqlApplicationConfigQueryList, GdcSparkApplicationSparkSqlApplicationConfigQueryListArgs                  
- Queries List<string>
- The queries to run.
- Queries []string
- The queries to run.
- queries List<String>
- The queries to run.
- queries string[]
- The queries to run.
- queries Sequence[str]
- The queries to run.
- queries List<String>
- The queries to run.
Import
SparkApplication can be imported using any of these accepted formats:
- projects/{{project}}/locations/{{location}}/serviceInstances/{{serviceinstance}}/sparkApplications/{{spark_application_id}}
- {{project}}/{{location}}/{{serviceinstance}}/{{spark_application_id}}
- {{location}}/{{serviceinstance}}/{{spark_application_id}}
When using the pulumi import command, SparkApplication can be imported using one of the formats above. For example:
$ pulumi import gcp:dataproc/gdcSparkApplication:GdcSparkApplication default projects/{{project}}/locations/{{location}}/serviceInstances/{{serviceinstance}}/sparkApplications/{{spark_application_id}}
$ pulumi import gcp:dataproc/gdcSparkApplication:GdcSparkApplication default {{project}}/{{location}}/{{serviceinstance}}/{{spark_application_id}}
$ pulumi import gcp:dataproc/gdcSparkApplication:GdcSparkApplication default {{location}}/{{serviceinstance}}/{{spark_application_id}}
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- Google Cloud (GCP) Classic pulumi/pulumi-gcp
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the google-beta Terraform Provider.