gcp.dataproc.Job
Manages a job resource within a Dataproc cluster on Google Cloud. For more information, see the official Dataproc documentation.
!> Note: This resource does not support 'update'; changing any attribute will cause the resource to be recreated.
Example Usage
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const mycluster = new gcp.dataproc.Cluster("mycluster", {
    name: "dproc-cluster-unique-name",
    region: "us-central1",
});
// Submit an example spark job to a dataproc cluster
const spark = new gcp.dataproc.Job("spark", {
    region: mycluster.region,
    forceDelete: true,
    placement: {
        clusterName: mycluster.name,
    },
    sparkConfig: {
        mainClass: "org.apache.spark.examples.SparkPi",
        jarFileUris: ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
        args: ["1000"],
        properties: {
            "spark.logConf": "true",
        },
        loggingConfig: {
            driverLogLevels: {
                root: "INFO",
            },
        },
    },
});
// Submit an example pyspark job to a dataproc cluster
const pyspark = new gcp.dataproc.Job("pyspark", {
    region: mycluster.region,
    forceDelete: true,
    placement: {
        clusterName: mycluster.name,
    },
    pysparkConfig: {
        mainPythonFileUri: "gs://dataproc-examples-2f10d78d114f6aaec76462e3c310f31f/src/pyspark/hello-world/hello-world.py",
        properties: {
            "spark.logConf": "true",
        },
    },
});
export const sparkStatus = spark.statuses.apply(statuses => statuses[0].state);
export const pysparkStatus = pyspark.statuses.apply(statuses => statuses[0].state);
import pulumi
import pulumi_gcp as gcp
mycluster = gcp.dataproc.Cluster("mycluster",
    name="dproc-cluster-unique-name",
    region="us-central1")
# Submit an example spark job to a dataproc cluster
spark = gcp.dataproc.Job("spark",
    region=mycluster.region,
    force_delete=True,
    placement={
        "cluster_name": mycluster.name,
    },
    spark_config={
        "main_class": "org.apache.spark.examples.SparkPi",
        "jar_file_uris": ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
        "args": ["1000"],
        "properties": {
            "spark.logConf": "true",
        },
        "logging_config": {
            "driver_log_levels": {
                "root": "INFO",
            },
        },
    })
# Submit an example pyspark job to a dataproc cluster
pyspark = gcp.dataproc.Job("pyspark",
    region=mycluster.region,
    force_delete=True,
    placement={
        "cluster_name": mycluster.name,
    },
    pyspark_config={
        "main_python_file_uri": "gs://dataproc-examples-2f10d78d114f6aaec76462e3c310f31f/src/pyspark/hello-world/hello-world.py",
        "properties": {
            "spark.logConf": "true",
        },
    })
pulumi.export("sparkStatus", spark.statuses[0].state)
pulumi.export("pysparkStatus", pyspark.statuses[0].state)
package main
import (
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/dataproc"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		mycluster, err := dataproc.NewCluster(ctx, "mycluster", &dataproc.ClusterArgs{
			Name:   pulumi.String("dproc-cluster-unique-name"),
			Region: pulumi.String("us-central1"),
		})
		if err != nil {
			return err
		}
		// Submit an example spark job to a dataproc cluster
		spark, err := dataproc.NewJob(ctx, "spark", &dataproc.JobArgs{
			Region:      mycluster.Region,
			ForceDelete: pulumi.Bool(true),
			Placement: &dataproc.JobPlacementArgs{
				ClusterName: mycluster.Name,
			},
			SparkConfig: &dataproc.JobSparkConfigArgs{
				MainClass: pulumi.String("org.apache.spark.examples.SparkPi"),
				JarFileUris: pulumi.StringArray{
					pulumi.String("file:///usr/lib/spark/examples/jars/spark-examples.jar"),
				},
				Args: pulumi.StringArray{
					pulumi.String("1000"),
				},
				Properties: pulumi.StringMap{
					"spark.logConf": pulumi.String("true"),
				},
				LoggingConfig: &dataproc.JobSparkConfigLoggingConfigArgs{
					DriverLogLevels: pulumi.StringMap{
						"root": pulumi.String("INFO"),
					},
				},
			},
		})
		if err != nil {
			return err
		}
		// Submit an example pyspark job to a dataproc cluster
		pyspark, err := dataproc.NewJob(ctx, "pyspark", &dataproc.JobArgs{
			Region:      mycluster.Region,
			ForceDelete: pulumi.Bool(true),
			Placement: &dataproc.JobPlacementArgs{
				ClusterName: mycluster.Name,
			},
			PysparkConfig: &dataproc.JobPysparkConfigArgs{
				MainPythonFileUri: pulumi.String("gs://dataproc-examples-2f10d78d114f6aaec76462e3c310f31f/src/pyspark/hello-world/hello-world.py"),
				Properties: pulumi.StringMap{
					"spark.logConf": pulumi.String("true"),
				},
			},
		})
		if err != nil {
			return err
		}
		ctx.Export("sparkStatus", spark.Statuses.ApplyT(func(statuses []dataproc.JobStatus) (*string, error) {
			return &statuses[0].State, nil
		}).(pulumi.StringPtrOutput))
		ctx.Export("pysparkStatus", pyspark.Statuses.ApplyT(func(statuses []dataproc.JobStatus) (*string, error) {
			return &statuses[0].State, nil
		}).(pulumi.StringPtrOutput))
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() => 
{
    var mycluster = new Gcp.Dataproc.Cluster("mycluster", new()
    {
        Name = "dproc-cluster-unique-name",
        Region = "us-central1",
    });
    // Submit an example spark job to a dataproc cluster
    var spark = new Gcp.Dataproc.Job("spark", new()
    {
        Region = mycluster.Region,
        ForceDelete = true,
        Placement = new Gcp.Dataproc.Inputs.JobPlacementArgs
        {
            ClusterName = mycluster.Name,
        },
        SparkConfig = new Gcp.Dataproc.Inputs.JobSparkConfigArgs
        {
            MainClass = "org.apache.spark.examples.SparkPi",
            JarFileUris = new[]
            {
                "file:///usr/lib/spark/examples/jars/spark-examples.jar",
            },
            Args = new[]
            {
                "1000",
            },
            Properties = 
            {
                { "spark.logConf", "true" },
            },
            LoggingConfig = new Gcp.Dataproc.Inputs.JobSparkConfigLoggingConfigArgs
            {
                DriverLogLevels = 
                {
                    { "root", "INFO" },
                },
            },
        },
    });
    // Submit an example pyspark job to a dataproc cluster
    var pyspark = new Gcp.Dataproc.Job("pyspark", new()
    {
        Region = mycluster.Region,
        ForceDelete = true,
        Placement = new Gcp.Dataproc.Inputs.JobPlacementArgs
        {
            ClusterName = mycluster.Name,
        },
        PysparkConfig = new Gcp.Dataproc.Inputs.JobPysparkConfigArgs
        {
            MainPythonFileUri = "gs://dataproc-examples-2f10d78d114f6aaec76462e3c310f31f/src/pyspark/hello-world/hello-world.py",
            Properties = 
            {
                { "spark.logConf", "true" },
            },
        },
    });
    return new Dictionary<string, object?>
    {
        ["sparkStatus"] = spark.Statuses.Apply(statuses => statuses[0].State),
        ["pysparkStatus"] = pyspark.Statuses.Apply(statuses => statuses[0].State),
    };
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.dataproc.Cluster;
import com.pulumi.gcp.dataproc.ClusterArgs;
import com.pulumi.gcp.dataproc.Job;
import com.pulumi.gcp.dataproc.JobArgs;
import com.pulumi.gcp.dataproc.inputs.JobPlacementArgs;
import com.pulumi.gcp.dataproc.inputs.JobSparkConfigArgs;
import com.pulumi.gcp.dataproc.inputs.JobSparkConfigLoggingConfigArgs;
import com.pulumi.gcp.dataproc.inputs.JobPysparkConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var mycluster = new Cluster("mycluster", ClusterArgs.builder()
            .name("dproc-cluster-unique-name")
            .region("us-central1")
            .build());
        // Submit an example spark job to a dataproc cluster
        var spark = new Job("spark", JobArgs.builder()
            .region(mycluster.region())
            .forceDelete(true)
            .placement(JobPlacementArgs.builder()
                .clusterName(mycluster.name())
                .build())
            .sparkConfig(JobSparkConfigArgs.builder()
                .mainClass("org.apache.spark.examples.SparkPi")
                .jarFileUris("file:///usr/lib/spark/examples/jars/spark-examples.jar")
                .args("1000")
                .properties(Map.of("spark.logConf", "true"))
                .loggingConfig(JobSparkConfigLoggingConfigArgs.builder()
                    .driverLogLevels(Map.of("root", "INFO"))
                    .build())
                .build())
            .build());
        // Submit an example pyspark job to a dataproc cluster
        var pyspark = new Job("pyspark", JobArgs.builder()
            .region(mycluster.region())
            .forceDelete(true)
            .placement(JobPlacementArgs.builder()
                .clusterName(mycluster.name())
                .build())
            .pysparkConfig(JobPysparkConfigArgs.builder()
                .mainPythonFileUri("gs://dataproc-examples-2f10d78d114f6aaec76462e3c310f31f/src/pyspark/hello-world/hello-world.py")
                .properties(Map.of("spark.logConf", "true"))
                .build())
            .build());
        ctx.export("sparkStatus", spark.statuses().applyValue(statuses -> statuses[0].state()));
        ctx.export("pysparkStatus", pyspark.statuses().applyValue(statuses -> statuses[0].state()));
    }
}
resources:
  mycluster:
    type: gcp:dataproc:Cluster
    properties:
      name: dproc-cluster-unique-name
      region: us-central1
  # Submit an example spark job to a dataproc cluster
  spark:
    type: gcp:dataproc:Job
    properties:
      region: ${mycluster.region}
      forceDelete: true
      placement:
        clusterName: ${mycluster.name}
      sparkConfig:
        mainClass: org.apache.spark.examples.SparkPi
        jarFileUris:
          - file:///usr/lib/spark/examples/jars/spark-examples.jar
        args:
          - '1000'
        properties:
          spark.logConf: 'true'
        loggingConfig:
          driverLogLevels:
            root: INFO
  # Submit an example pyspark job to a dataproc cluster
  pyspark:
    type: gcp:dataproc:Job
    properties:
      region: ${mycluster.region}
      forceDelete: true
      placement:
        clusterName: ${mycluster.name}
      pysparkConfig:
        mainPythonFileUri: gs://dataproc-examples-2f10d78d114f6aaec76462e3c310f31f/src/pyspark/hello-world/hello-world.py
        properties:
          spark.logConf: 'true'
outputs:
  # Check out current state of the jobs
  sparkStatus: ${spark.statuses[0].state}
  pysparkStatus: ${pyspark.statuses[0].state}
Create Job Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new Job(name: string, args: JobArgs, opts?: CustomResourceOptions);
@overload
def Job(resource_name: str,
        args: JobArgs,
        opts: Optional[ResourceOptions] = None)
@overload
def Job(resource_name: str,
        opts: Optional[ResourceOptions] = None,
        placement: Optional[JobPlacementArgs] = None,
        presto_config: Optional[JobPrestoConfigArgs] = None,
        hive_config: Optional[JobHiveConfigArgs] = None,
        labels: Optional[Mapping[str, str]] = None,
        pig_config: Optional[JobPigConfigArgs] = None,
        hadoop_config: Optional[JobHadoopConfigArgs] = None,
        force_delete: Optional[bool] = None,
        project: Optional[str] = None,
        pyspark_config: Optional[JobPysparkConfigArgs] = None,
        reference: Optional[JobReferenceArgs] = None,
        region: Optional[str] = None,
        scheduling: Optional[JobSchedulingArgs] = None,
        spark_config: Optional[JobSparkConfigArgs] = None,
        sparksql_config: Optional[JobSparksqlConfigArgs] = None)
func NewJob(ctx *Context, name string, args JobArgs, opts ...ResourceOption) (*Job, error)
public Job(string name, JobArgs args, CustomResourceOptions? opts = null)
type: gcp:dataproc:Job
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args JobArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args JobArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args JobArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args JobArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args JobArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var examplejobResourceResourceFromDataprocjob = new Gcp.Dataproc.Job("examplejobResourceResourceFromDataprocjob", new()
{
    Placement = new Gcp.Dataproc.Inputs.JobPlacementArgs
    {
        ClusterName = "string",
        ClusterUuid = "string",
    },
    PrestoConfig = new Gcp.Dataproc.Inputs.JobPrestoConfigArgs
    {
        ClientTags = new[]
        {
            "string",
        },
        ContinueOnFailure = false,
        LoggingConfig = new Gcp.Dataproc.Inputs.JobPrestoConfigLoggingConfigArgs
        {
            DriverLogLevels = 
            {
                { "string", "string" },
            },
        },
        OutputFormat = "string",
        Properties = 
        {
            { "string", "string" },
        },
        QueryFileUri = "string",
        QueryLists = new[]
        {
            "string",
        },
    },
    HiveConfig = new Gcp.Dataproc.Inputs.JobHiveConfigArgs
    {
        ContinueOnFailure = false,
        JarFileUris = new[]
        {
            "string",
        },
        Properties = 
        {
            { "string", "string" },
        },
        QueryFileUri = "string",
        QueryLists = new[]
        {
            "string",
        },
        ScriptVariables = 
        {
            { "string", "string" },
        },
    },
    Labels = 
    {
        { "string", "string" },
    },
    PigConfig = new Gcp.Dataproc.Inputs.JobPigConfigArgs
    {
        ContinueOnFailure = false,
        JarFileUris = new[]
        {
            "string",
        },
        LoggingConfig = new Gcp.Dataproc.Inputs.JobPigConfigLoggingConfigArgs
        {
            DriverLogLevels = 
            {
                { "string", "string" },
            },
        },
        Properties = 
        {
            { "string", "string" },
        },
        QueryFileUri = "string",
        QueryLists = new[]
        {
            "string",
        },
        ScriptVariables = 
        {
            { "string", "string" },
        },
    },
    HadoopConfig = new Gcp.Dataproc.Inputs.JobHadoopConfigArgs
    {
        ArchiveUris = new[]
        {
            "string",
        },
        Args = new[]
        {
            "string",
        },
        FileUris = new[]
        {
            "string",
        },
        JarFileUris = new[]
        {
            "string",
        },
        LoggingConfig = new Gcp.Dataproc.Inputs.JobHadoopConfigLoggingConfigArgs
        {
            DriverLogLevels = 
            {
                { "string", "string" },
            },
        },
        MainClass = "string",
        MainJarFileUri = "string",
        Properties = 
        {
            { "string", "string" },
        },
    },
    ForceDelete = false,
    Project = "string",
    PysparkConfig = new Gcp.Dataproc.Inputs.JobPysparkConfigArgs
    {
        MainPythonFileUri = "string",
        ArchiveUris = new[]
        {
            "string",
        },
        Args = new[]
        {
            "string",
        },
        FileUris = new[]
        {
            "string",
        },
        JarFileUris = new[]
        {
            "string",
        },
        LoggingConfig = new Gcp.Dataproc.Inputs.JobPysparkConfigLoggingConfigArgs
        {
            DriverLogLevels = 
            {
                { "string", "string" },
            },
        },
        Properties = 
        {
            { "string", "string" },
        },
        PythonFileUris = new[]
        {
            "string",
        },
    },
    Reference = new Gcp.Dataproc.Inputs.JobReferenceArgs
    {
        JobId = "string",
    },
    Region = "string",
    Scheduling = new Gcp.Dataproc.Inputs.JobSchedulingArgs
    {
        MaxFailuresPerHour = 0,
        MaxFailuresTotal = 0,
    },
    SparkConfig = new Gcp.Dataproc.Inputs.JobSparkConfigArgs
    {
        ArchiveUris = new[]
        {
            "string",
        },
        Args = new[]
        {
            "string",
        },
        FileUris = new[]
        {
            "string",
        },
        JarFileUris = new[]
        {
            "string",
        },
        LoggingConfig = new Gcp.Dataproc.Inputs.JobSparkConfigLoggingConfigArgs
        {
            DriverLogLevels = 
            {
                { "string", "string" },
            },
        },
        MainClass = "string",
        MainJarFileUri = "string",
        Properties = 
        {
            { "string", "string" },
        },
    },
    SparksqlConfig = new Gcp.Dataproc.Inputs.JobSparksqlConfigArgs
    {
        JarFileUris = new[]
        {
            "string",
        },
        LoggingConfig = new Gcp.Dataproc.Inputs.JobSparksqlConfigLoggingConfigArgs
        {
            DriverLogLevels = 
            {
                { "string", "string" },
            },
        },
        Properties = 
        {
            { "string", "string" },
        },
        QueryFileUri = "string",
        QueryLists = new[]
        {
            "string",
        },
        ScriptVariables = 
        {
            { "string", "string" },
        },
    },
});
example, err := dataproc.NewJob(ctx, "examplejobResourceResourceFromDataprocjob", &dataproc.JobArgs{
	Placement: &dataproc.JobPlacementArgs{
		ClusterName: pulumi.String("string"),
		ClusterUuid: pulumi.String("string"),
	},
	PrestoConfig: &dataproc.JobPrestoConfigArgs{
		ClientTags: pulumi.StringArray{
			pulumi.String("string"),
		},
		ContinueOnFailure: pulumi.Bool(false),
		LoggingConfig: &dataproc.JobPrestoConfigLoggingConfigArgs{
			DriverLogLevels: pulumi.StringMap{
				"string": pulumi.String("string"),
			},
		},
		OutputFormat: pulumi.String("string"),
		Properties: pulumi.StringMap{
			"string": pulumi.String("string"),
		},
		QueryFileUri: pulumi.String("string"),
		QueryLists: pulumi.StringArray{
			pulumi.String("string"),
		},
	},
	HiveConfig: &dataproc.JobHiveConfigArgs{
		ContinueOnFailure: pulumi.Bool(false),
		JarFileUris: pulumi.StringArray{
			pulumi.String("string"),
		},
		Properties: pulumi.StringMap{
			"string": pulumi.String("string"),
		},
		QueryFileUri: pulumi.String("string"),
		QueryLists: pulumi.StringArray{
			pulumi.String("string"),
		},
		ScriptVariables: pulumi.StringMap{
			"string": pulumi.String("string"),
		},
	},
	Labels: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	PigConfig: &dataproc.JobPigConfigArgs{
		ContinueOnFailure: pulumi.Bool(false),
		JarFileUris: pulumi.StringArray{
			pulumi.String("string"),
		},
		LoggingConfig: &dataproc.JobPigConfigLoggingConfigArgs{
			DriverLogLevels: pulumi.StringMap{
				"string": pulumi.String("string"),
			},
		},
		Properties: pulumi.StringMap{
			"string": pulumi.String("string"),
		},
		QueryFileUri: pulumi.String("string"),
		QueryLists: pulumi.StringArray{
			pulumi.String("string"),
		},
		ScriptVariables: pulumi.StringMap{
			"string": pulumi.String("string"),
		},
	},
	HadoopConfig: &dataproc.JobHadoopConfigArgs{
		ArchiveUris: pulumi.StringArray{
			pulumi.String("string"),
		},
		Args: pulumi.StringArray{
			pulumi.String("string"),
		},
		FileUris: pulumi.StringArray{
			pulumi.String("string"),
		},
		JarFileUris: pulumi.StringArray{
			pulumi.String("string"),
		},
		LoggingConfig: &dataproc.JobHadoopConfigLoggingConfigArgs{
			DriverLogLevels: pulumi.StringMap{
				"string": pulumi.String("string"),
			},
		},
		MainClass:      pulumi.String("string"),
		MainJarFileUri: pulumi.String("string"),
		Properties: pulumi.StringMap{
			"string": pulumi.String("string"),
		},
	},
	ForceDelete: pulumi.Bool(false),
	Project:     pulumi.String("string"),
	PysparkConfig: &dataproc.JobPysparkConfigArgs{
		MainPythonFileUri: pulumi.String("string"),
		ArchiveUris: pulumi.StringArray{
			pulumi.String("string"),
		},
		Args: pulumi.StringArray{
			pulumi.String("string"),
		},
		FileUris: pulumi.StringArray{
			pulumi.String("string"),
		},
		JarFileUris: pulumi.StringArray{
			pulumi.String("string"),
		},
		LoggingConfig: &dataproc.JobPysparkConfigLoggingConfigArgs{
			DriverLogLevels: pulumi.StringMap{
				"string": pulumi.String("string"),
			},
		},
		Properties: pulumi.StringMap{
			"string": pulumi.String("string"),
		},
		PythonFileUris: pulumi.StringArray{
			pulumi.String("string"),
		},
	},
	Reference: &dataproc.JobReferenceArgs{
		JobId: pulumi.String("string"),
	},
	Region: pulumi.String("string"),
	Scheduling: &dataproc.JobSchedulingArgs{
		MaxFailuresPerHour: pulumi.Int(0),
		MaxFailuresTotal:   pulumi.Int(0),
	},
	SparkConfig: &dataproc.JobSparkConfigArgs{
		ArchiveUris: pulumi.StringArray{
			pulumi.String("string"),
		},
		Args: pulumi.StringArray{
			pulumi.String("string"),
		},
		FileUris: pulumi.StringArray{
			pulumi.String("string"),
		},
		JarFileUris: pulumi.StringArray{
			pulumi.String("string"),
		},
		LoggingConfig: &dataproc.JobSparkConfigLoggingConfigArgs{
			DriverLogLevels: pulumi.StringMap{
				"string": pulumi.String("string"),
			},
		},
		MainClass:      pulumi.String("string"),
		MainJarFileUri: pulumi.String("string"),
		Properties: pulumi.StringMap{
			"string": pulumi.String("string"),
		},
	},
	SparksqlConfig: &dataproc.JobSparksqlConfigArgs{
		JarFileUris: pulumi.StringArray{
			pulumi.String("string"),
		},
		LoggingConfig: &dataproc.JobSparksqlConfigLoggingConfigArgs{
			DriverLogLevels: pulumi.StringMap{
				"string": pulumi.String("string"),
			},
		},
		Properties: pulumi.StringMap{
			"string": pulumi.String("string"),
		},
		QueryFileUri: pulumi.String("string"),
		QueryLists: pulumi.StringArray{
			pulumi.String("string"),
		},
		ScriptVariables: pulumi.StringMap{
			"string": pulumi.String("string"),
		},
	},
})
var examplejobResourceResourceFromDataprocjob = new Job("examplejobResourceResourceFromDataprocjob", JobArgs.builder()
    .placement(JobPlacementArgs.builder()
        .clusterName("string")
        .clusterUuid("string")
        .build())
    .prestoConfig(JobPrestoConfigArgs.builder()
        .clientTags("string")
        .continueOnFailure(false)
        .loggingConfig(JobPrestoConfigLoggingConfigArgs.builder()
            .driverLogLevels(Map.of("string", "string"))
            .build())
        .outputFormat("string")
        .properties(Map.of("string", "string"))
        .queryFileUri("string")
        .queryLists("string")
        .build())
    .hiveConfig(JobHiveConfigArgs.builder()
        .continueOnFailure(false)
        .jarFileUris("string")
        .properties(Map.of("string", "string"))
        .queryFileUri("string")
        .queryLists("string")
        .scriptVariables(Map.of("string", "string"))
        .build())
    .labels(Map.of("string", "string"))
    .pigConfig(JobPigConfigArgs.builder()
        .continueOnFailure(false)
        .jarFileUris("string")
        .loggingConfig(JobPigConfigLoggingConfigArgs.builder()
            .driverLogLevels(Map.of("string", "string"))
            .build())
        .properties(Map.of("string", "string"))
        .queryFileUri("string")
        .queryLists("string")
        .scriptVariables(Map.of("string", "string"))
        .build())
    .hadoopConfig(JobHadoopConfigArgs.builder()
        .archiveUris("string")
        .args("string")
        .fileUris("string")
        .jarFileUris("string")
        .loggingConfig(JobHadoopConfigLoggingConfigArgs.builder()
            .driverLogLevels(Map.of("string", "string"))
            .build())
        .mainClass("string")
        .mainJarFileUri("string")
        .properties(Map.of("string", "string"))
        .build())
    .forceDelete(false)
    .project("string")
    .pysparkConfig(JobPysparkConfigArgs.builder()
        .mainPythonFileUri("string")
        .archiveUris("string")
        .args("string")
        .fileUris("string")
        .jarFileUris("string")
        .loggingConfig(JobPysparkConfigLoggingConfigArgs.builder()
            .driverLogLevels(Map.of("string", "string"))
            .build())
        .properties(Map.of("string", "string"))
        .pythonFileUris("string")
        .build())
    .reference(JobReferenceArgs.builder()
        .jobId("string")
        .build())
    .region("string")
    .scheduling(JobSchedulingArgs.builder()
        .maxFailuresPerHour(0)
        .maxFailuresTotal(0)
        .build())
    .sparkConfig(JobSparkConfigArgs.builder()
        .archiveUris("string")
        .args("string")
        .fileUris("string")
        .jarFileUris("string")
        .loggingConfig(JobSparkConfigLoggingConfigArgs.builder()
            .driverLogLevels(Map.of("string", "string"))
            .build())
        .mainClass("string")
        .mainJarFileUri("string")
        .properties(Map.of("string", "string"))
        .build())
    .sparksqlConfig(JobSparksqlConfigArgs.builder()
        .jarFileUris("string")
        .loggingConfig(JobSparksqlConfigLoggingConfigArgs.builder()
            .driverLogLevels(Map.of("string", "string"))
            .build())
        .properties(Map.of("string", "string"))
        .queryFileUri("string")
        .queryLists("string")
        .scriptVariables(Map.of("string", "string"))
        .build())
    .build());
examplejob_resource_resource_from_dataprocjob = gcp.dataproc.Job("examplejobResourceResourceFromDataprocjob",
    placement={
        "cluster_name": "string",
        "cluster_uuid": "string",
    },
    presto_config={
        "client_tags": ["string"],
        "continue_on_failure": False,
        "logging_config": {
            "driver_log_levels": {
                "string": "string",
            },
        },
        "output_format": "string",
        "properties": {
            "string": "string",
        },
        "query_file_uri": "string",
        "query_lists": ["string"],
    },
    hive_config={
        "continue_on_failure": False,
        "jar_file_uris": ["string"],
        "properties": {
            "string": "string",
        },
        "query_file_uri": "string",
        "query_lists": ["string"],
        "script_variables": {
            "string": "string",
        },
    },
    labels={
        "string": "string",
    },
    pig_config={
        "continue_on_failure": False,
        "jar_file_uris": ["string"],
        "logging_config": {
            "driver_log_levels": {
                "string": "string",
            },
        },
        "properties": {
            "string": "string",
        },
        "query_file_uri": "string",
        "query_lists": ["string"],
        "script_variables": {
            "string": "string",
        },
    },
    hadoop_config={
        "archive_uris": ["string"],
        "args": ["string"],
        "file_uris": ["string"],
        "jar_file_uris": ["string"],
        "logging_config": {
            "driver_log_levels": {
                "string": "string",
            },
        },
        "main_class": "string",
        "main_jar_file_uri": "string",
        "properties": {
            "string": "string",
        },
    },
    force_delete=False,
    project="string",
    pyspark_config={
        "main_python_file_uri": "string",
        "archive_uris": ["string"],
        "args": ["string"],
        "file_uris": ["string"],
        "jar_file_uris": ["string"],
        "logging_config": {
            "driver_log_levels": {
                "string": "string",
            },
        },
        "properties": {
            "string": "string",
        },
        "python_file_uris": ["string"],
    },
    reference={
        "job_id": "string",
    },
    region="string",
    scheduling={
        "max_failures_per_hour": 0,
        "max_failures_total": 0,
    },
    spark_config={
        "archive_uris": ["string"],
        "args": ["string"],
        "file_uris": ["string"],
        "jar_file_uris": ["string"],
        "logging_config": {
            "driver_log_levels": {
                "string": "string",
            },
        },
        "main_class": "string",
        "main_jar_file_uri": "string",
        "properties": {
            "string": "string",
        },
    },
    sparksql_config={
        "jar_file_uris": ["string"],
        "logging_config": {
            "driver_log_levels": {
                "string": "string",
            },
        },
        "properties": {
            "string": "string",
        },
        "query_file_uri": "string",
        "query_lists": ["string"],
        "script_variables": {
            "string": "string",
        },
    })
const examplejobResourceResourceFromDataprocjob = new gcp.dataproc.Job("examplejobResourceResourceFromDataprocjob", {
    placement: {
        clusterName: "string",
        clusterUuid: "string",
    },
    prestoConfig: {
        clientTags: ["string"],
        continueOnFailure: false,
        loggingConfig: {
            driverLogLevels: {
                string: "string",
            },
        },
        outputFormat: "string",
        properties: {
            string: "string",
        },
        queryFileUri: "string",
        queryLists: ["string"],
    },
    hiveConfig: {
        continueOnFailure: false,
        jarFileUris: ["string"],
        properties: {
            string: "string",
        },
        queryFileUri: "string",
        queryLists: ["string"],
        scriptVariables: {
            string: "string",
        },
    },
    labels: {
        string: "string",
    },
    pigConfig: {
        continueOnFailure: false,
        jarFileUris: ["string"],
        loggingConfig: {
            driverLogLevels: {
                string: "string",
            },
        },
        properties: {
            string: "string",
        },
        queryFileUri: "string",
        queryLists: ["string"],
        scriptVariables: {
            string: "string",
        },
    },
    hadoopConfig: {
        archiveUris: ["string"],
        args: ["string"],
        fileUris: ["string"],
        jarFileUris: ["string"],
        loggingConfig: {
            driverLogLevels: {
                string: "string",
            },
        },
        mainClass: "string",
        mainJarFileUri: "string",
        properties: {
            string: "string",
        },
    },
    forceDelete: false,
    project: "string",
    pysparkConfig: {
        mainPythonFileUri: "string",
        archiveUris: ["string"],
        args: ["string"],
        fileUris: ["string"],
        jarFileUris: ["string"],
        loggingConfig: {
            driverLogLevels: {
                string: "string",
            },
        },
        properties: {
            string: "string",
        },
        pythonFileUris: ["string"],
    },
    reference: {
        jobId: "string",
    },
    region: "string",
    scheduling: {
        maxFailuresPerHour: 0,
        maxFailuresTotal: 0,
    },
    sparkConfig: {
        archiveUris: ["string"],
        args: ["string"],
        fileUris: ["string"],
        jarFileUris: ["string"],
        loggingConfig: {
            driverLogLevels: {
                string: "string",
            },
        },
        mainClass: "string",
        mainJarFileUri: "string",
        properties: {
            string: "string",
        },
    },
    sparksqlConfig: {
        jarFileUris: ["string"],
        loggingConfig: {
            driverLogLevels: {
                string: "string",
            },
        },
        properties: {
            string: "string",
        },
        queryFileUri: "string",
        queryLists: ["string"],
        scriptVariables: {
            string: "string",
        },
    },
});
type: gcp:dataproc:Job
properties:
    forceDelete: false
    hadoopConfig:
        archiveUris:
            - string
        args:
            - string
        fileUris:
            - string
        jarFileUris:
            - string
        loggingConfig:
            driverLogLevels:
                string: string
        mainClass: string
        mainJarFileUri: string
        properties:
            string: string
    hiveConfig:
        continueOnFailure: false
        jarFileUris:
            - string
        properties:
            string: string
        queryFileUri: string
        queryLists:
            - string
        scriptVariables:
            string: string
    labels:
        string: string
    pigConfig:
        continueOnFailure: false
        jarFileUris:
            - string
        loggingConfig:
            driverLogLevels:
                string: string
        properties:
            string: string
        queryFileUri: string
        queryLists:
            - string
        scriptVariables:
            string: string
    placement:
        clusterName: string
        clusterUuid: string
    prestoConfig:
        clientTags:
            - string
        continueOnFailure: false
        loggingConfig:
            driverLogLevels:
                string: string
        outputFormat: string
        properties:
            string: string
        queryFileUri: string
        queryLists:
            - string
    project: string
    pysparkConfig:
        archiveUris:
            - string
        args:
            - string
        fileUris:
            - string
        jarFileUris:
            - string
        loggingConfig:
            driverLogLevels:
                string: string
        mainPythonFileUri: string
        properties:
            string: string
        pythonFileUris:
            - string
    reference:
        jobId: string
    region: string
    scheduling:
        maxFailuresPerHour: 0
        maxFailuresTotal: 0
    sparkConfig:
        archiveUris:
            - string
        args:
            - string
        fileUris:
            - string
        jarFileUris:
            - string
        loggingConfig:
            driverLogLevels:
                string: string
        mainClass: string
        mainJarFileUri: string
        properties:
            string: string
    sparksqlConfig:
        jarFileUris:
            - string
        loggingConfig:
            driverLogLevels:
                string: string
        properties:
            string: string
        queryFileUri: string
        queryLists:
            - string
        scriptVariables:
            string: string
Job Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
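As a non-authoritative illustration of those two forms, the sketch below constructs the same Job twice in Python; the resource names and the "example-cluster" placement value are placeholders, not values taken from the examples above.
import pulumi_gcp as gcp

# Form 1: typed argument classes (the cluster name used here is a placeholder).
job_with_args = gcp.dataproc.Job("job-with-args",
    region="us-central1",
    placement=gcp.dataproc.JobPlacementArgs(cluster_name="example-cluster"),
    spark_config=gcp.dataproc.JobSparkConfigArgs(
        main_class="org.apache.spark.examples.SparkPi",
        jar_file_uris=["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
    ))

# Form 2: plain dictionary literals with snake_case keys, equivalent to the above.
job_with_dicts = gcp.dataproc.Job("job-with-dicts",
    region="us-central1",
    placement={"cluster_name": "example-cluster"},
    spark_config={
        "main_class": "org.apache.spark.examples.SparkPi",
        "jar_file_uris": ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
    })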
The Job resource accepts the following input properties:
- Placement JobPlacement
- The config of job placement.
- ForceDelete bool
- By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.
- HadoopConfig JobHadoopConfig
- The config of the Hadoop job.
- HiveConfig JobHiveConfig
- The config of the Hive job.
- Labels Dictionary<string, string>
- The list of labels (key/value pairs) to add to the job. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
- PigConfig JobPigConfig
- The config of the Pig job.
- PrestoConfig JobPrestoConfig
- The config of the Presto job.
- Project string
- The project in which the cluster can be found and against which jobs are subsequently run. If it is not provided, the provider project is used.
- PysparkConfig JobPysparkConfig
- The config of the PySpark job.
- Reference JobReference
- The reference of the job.
- Region string
- The Cloud Dataproc region. This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global.
- Scheduling JobScheduling
- Optional. Job scheduling configuration.
- SparkConfig JobSparkConfig
- The config of the Spark job.
- SparksqlConfig JobSparksqlConfig
- The config of the SparkSQL job.
- Placement JobPlacementArgs
- The config of job placement.
- ForceDelete bool
- By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.
- HadoopConfig JobHadoopConfigArgs
- The config of the Hadoop job.
- HiveConfig JobHiveConfigArgs
- The config of the Hive job.
- Labels map[string]string
- The list of labels (key/value pairs) to add to the job. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
- PigConfig JobPigConfigArgs
- The config of the Pig job.
- PrestoConfig JobPrestoConfigArgs
- The config of the Presto job.
- Project string
- The project in which the cluster can be found and against which jobs are subsequently run. If it is not provided, the provider project is used.
- PysparkConfig JobPysparkConfigArgs
- The config of the PySpark job.
- Reference JobReferenceArgs
- The reference of the job.
- Region string
- The Cloud Dataproc region. This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global.
- Scheduling JobSchedulingArgs
- Optional. Job scheduling configuration.
- SparkConfig JobSparkConfigArgs
- The config of the Spark job.
- SparksqlConfig JobSparksqlConfigArgs
- The config of the SparkSQL job.
- placement JobPlacement
- The config of job placement.
- forceDelete Boolean
- By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.
- hadoopConfig JobHadoopConfig
- The config of the Hadoop job.
- hiveConfig JobHiveConfig
- The config of the Hive job.
- labels Map<String,String>
- The list of labels (key/value pairs) to add to the job. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
- pigConfig JobPigConfig
- The config of the Pig job.
- prestoConfig JobPrestoConfig
- The config of the Presto job.
- project String
- The project in which the cluster can be found and against which jobs are subsequently run. If it is not provided, the provider project is used.
- pysparkConfig JobPysparkConfig
- The config of the PySpark job.
- reference JobReference
- The reference of the job.
- region String
- The Cloud Dataproc region. This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global.
- scheduling JobScheduling
- Optional. Job scheduling configuration.
- sparkConfig JobSparkConfig
- The config of the Spark job.
- sparksqlConfig JobSparksqlConfig
- The config of the SparkSQL job.
- placement JobPlacement
- The config of job placement.
- forceDelete boolean
- By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.
- hadoopConfig JobHadoopConfig
- The config of the Hadoop job.
- hiveConfig JobHiveConfig
- The config of the Hive job.
- labels {[key: string]: string}
- The list of labels (key/value pairs) to add to the job. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
- pigConfig JobPigConfig
- The config of the Pig job.
- prestoConfig JobPrestoConfig
- The config of the Presto job.
- project string
- The project in which the cluster can be found and against which jobs are subsequently run. If it is not provided, the provider project is used.
- pysparkConfig JobPysparkConfig
- The config of the PySpark job.
- reference JobReference
- The reference of the job.
- region string
- The Cloud Dataproc region. This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global.
- scheduling JobScheduling
- Optional. Job scheduling configuration.
- sparkConfig JobSparkConfig
- The config of the Spark job.
- sparksqlConfig JobSparksqlConfig
- The config of the SparkSQL job.
- placement JobPlacementArgs
- The config of job placement.
- force_delete bool
- By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.
- hadoop_config JobHadoopConfigArgs
- The config of the Hadoop job.
- hive_config JobHiveConfigArgs
- The config of the Hive job.
- labels Mapping[str, str]
- The list of labels (key/value pairs) to add to the job. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
- pig_config JobPigConfigArgs
- The config of the Pig job.
- presto_config JobPrestoConfigArgs
- The config of the Presto job.
- project str
- The project in which the cluster can be found and against which jobs are subsequently run. If it is not provided, the provider project is used.
- pyspark_config JobPysparkConfigArgs
- The config of the PySpark job.
- reference JobReferenceArgs
- The reference of the job.
- region str
- The Cloud Dataproc region. This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global.
- scheduling JobSchedulingArgs
- Optional. Job scheduling configuration.
- spark_config JobSparkConfigArgs
- The config of the Spark job.
- sparksql_config JobSparksqlConfigArgs
- The config of the SparkSQL job.
- placement Property Map
- The config of job placement.
- forceDelete Boolean
- By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.
- hadoopConfig Property Map
- The config of the Hadoop job.
- hiveConfig Property Map
- The config of the Hive job.
- labels Map<String>
- The list of labels (key/value pairs) to add to the job. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
- pigConfig Property Map
- The config of the Pig job.
- prestoConfig Property Map
- The config of the Presto job.
- project String
- The project in which the cluster can be found and against which jobs are subsequently run. If it is not provided, the provider project is used.
- pysparkConfig Property Map
- The config of the PySpark job.
- reference Property Map
- The reference of the job.
- region String
- The Cloud Dataproc region. This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global.
- scheduling Property Map
- Optional. Job scheduling configuration.
- sparkConfig Property Map
- The config of the Spark job.
- sparksqlConfig Property Map
- The config of the SparkSQL job.
Outputs
All input properties are implicitly available as output properties. Additionally, the Job resource produces the following output properties:
- DriverControlsFilesUri string
- If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.
- DriverOutputResourceUri string
- A URI pointing to the location of the stdout of the job's driver program.
- EffectiveLabels Dictionary<string, string>
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- scheduling.max_failures_per_hour - (Required) Maximum number of times per hour a driver may be restarted as a result of the driver exiting with a non-zero code before the job is reported failed.
- scheduling.max_failures_total - (Required) Maximum number of times in total a driver may be restarted as a result of the driver exiting with a non-zero code before the job is reported failed.
- Id string
- The provider-assigned unique ID for this managed resource.
- PulumiLabels Dictionary<string, string>
- The combination of labels configured directly on the resource and default labels configured on the provider.
- Statuses List<JobStatus>
- The status of the job.
- DriverControlsFilesUri string
- If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.
- DriverOutputResourceUri string
- A URI pointing to the location of the stdout of the job's driver program.
- EffectiveLabels map[string]string
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- scheduling.max_failures_per_hour - (Required) Maximum number of times per hour a driver may be restarted as a result of the driver exiting with a non-zero code before the job is reported failed.
- scheduling.max_failures_total - (Required) Maximum number of times in total a driver may be restarted as a result of the driver exiting with a non-zero code before the job is reported failed.
- Id string
- The provider-assigned unique ID for this managed resource.
- PulumiLabels map[string]string
- The combination of labels configured directly on the resource and default labels configured on the provider.
- Statuses []JobStatus
- The status of the job.
- driverControlsFilesUri String
- If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.
- driverOutputResourceUri String
- A URI pointing to the location of the stdout of the job's driver program.
- effectiveLabels Map<String,String>
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- scheduling.max_failures_per_hour - (Required) Maximum number of times per hour a driver may be restarted as a result of the driver exiting with a non-zero code before the job is reported failed.
- scheduling.max_failures_total - (Required) Maximum number of times in total a driver may be restarted as a result of the driver exiting with a non-zero code before the job is reported failed.
- id String
- The provider-assigned unique ID for this managed resource.
- pulumiLabels Map<String,String>
- The combination of labels configured directly on the resource and default labels configured on the provider.
- statuses List<JobStatus>
- The status of the job.
- driverControlsFilesUri string
- If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.
- driverOutputResourceUri string
- A URI pointing to the location of the stdout of the job's driver program.
- effectiveLabels {[key: string]: string}
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- scheduling.max_failures_per_hour - (Required) Maximum number of times per hour a driver may be restarted as a result of the driver exiting with a non-zero code before the job is reported failed.
- scheduling.max_failures_total - (Required) Maximum number of times in total a driver may be restarted as a result of the driver exiting with a non-zero code before the job is reported failed.
- id string
- The provider-assigned unique ID for this managed resource.
- pulumiLabels {[key: string]: string}
- The combination of labels configured directly on the resource and default labels configured on the provider.
- statuses JobStatus[]
- The status of the job.
- driver_controls_files_uri str
- If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.
- driver_output_resource_uri str
- A URI pointing to the location of the stdout of the job's driver program.
- effective_labels Mapping[str, str]
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- scheduling.max_failures_per_hour - (Required) Maximum number of times per hour a driver may be restarted as a result of the driver exiting with a non-zero code before the job is reported failed.
- scheduling.max_failures_total - (Required) Maximum number of times in total a driver may be restarted as a result of the driver exiting with a non-zero code before the job is reported failed.
- id str
- The provider-assigned unique ID for this managed resource.
- pulumi_labels Mapping[str, str]
- The combination of labels configured directly on the resource and default labels configured on the provider.
- statuses Sequence[JobStatus]
- The status of the job.
- driverControlsFilesUri String
- If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.
- driverOutputResourceUri String
- A URI pointing to the location of the stdout of the job's driver program.
- effectiveLabels Map<String>
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- scheduling.max_failures_per_hour - (Required) Maximum number of times per hour a driver may be restarted as a result of the driver exiting with a non-zero code before the job is reported failed.
- scheduling.max_failures_total - (Required) Maximum number of times in total a driver may be restarted as a result of the driver exiting with a non-zero code before the job is reported failed.
- id String
- The provider-assigned unique ID for this managed resource.
- pulumiLabels Map<String>
- The combination of labels configured directly on the resource and default labels configured on the provider.
- statuses List<Property Map>
- The status of the job.
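To show how these output properties are typically consumed, here is a minimal, non-authoritative Python sketch; the cluster and job defined in it are illustrative placeholders, and it simply exports two of the outputs listed above.
import pulumi
import pulumi_gcp as gcp

# Minimal sketch: the cluster and job names below are placeholders.
cluster = gcp.dataproc.Cluster("output-demo-cluster", region="us-central1")
job = gcp.dataproc.Job("output-demo-job",
    region=cluster.region,
    placement={"cluster_name": cluster.name},
    spark_config={
        "main_class": "org.apache.spark.examples.SparkPi",
        "jar_file_uris": ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
    })

# Output properties are Pulumi Outputs and can be exported or transformed with apply().
pulumi.export("driverOutputUri", job.driver_output_resource_uri)
pulumi.export("jobState", job.statuses.apply(lambda statuses: statuses[0].state))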
Look up Existing Job Resource
Get an existing Job resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: JobState, opts?: CustomResourceOptions): Job
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        driver_controls_files_uri: Optional[str] = None,
        driver_output_resource_uri: Optional[str] = None,
        effective_labels: Optional[Mapping[str, str]] = None,
        force_delete: Optional[bool] = None,
        hadoop_config: Optional[JobHadoopConfigArgs] = None,
        hive_config: Optional[JobHiveConfigArgs] = None,
        labels: Optional[Mapping[str, str]] = None,
        pig_config: Optional[JobPigConfigArgs] = None,
        placement: Optional[JobPlacementArgs] = None,
        presto_config: Optional[JobPrestoConfigArgs] = None,
        project: Optional[str] = None,
        pulumi_labels: Optional[Mapping[str, str]] = None,
        pyspark_config: Optional[JobPysparkConfigArgs] = None,
        reference: Optional[JobReferenceArgs] = None,
        region: Optional[str] = None,
        scheduling: Optional[JobSchedulingArgs] = None,
        spark_config: Optional[JobSparkConfigArgs] = None,
        sparksql_config: Optional[JobSparksqlConfigArgs] = None,
        statuses: Optional[Sequence[JobStatusArgs]] = None) -> Job
func GetJob(ctx *Context, name string, id IDInput, state *JobState, opts ...ResourceOption) (*Job, error)
public static Job Get(string name, Input<string> id, JobState? state, CustomResourceOptions? opts = null)
public static Job get(String name, Output<String> id, JobState state, CustomResourceOptions options)
resources:
  _:
    type: gcp:dataproc:Job
    get:
      id: ${id}
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
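As a hedged Python sketch of such a lookup (the resource name and the ID below are placeholders; pass the provider-assigned ID recorded for your own job, for example as shown in your stack's state):
import pulumi
import pulumi_gcp as gcp

# Look up an existing Dataproc job by its provider-assigned ID.
# "my-job-id" is a placeholder; the exact ID to pass is the one reported
# for the resource in your stack's state.
existing_job = gcp.dataproc.Job.get("existing-job", "my-job-id")

pulumi.export("existingJobState", existing_job.statuses.apply(lambda statuses: statuses[0].state))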
- DriverControlsFilesUri string
- If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.
- DriverOutputResourceUri string
- A URI pointing to the location of the stdout of the job's driver program.
- EffectiveLabels Dictionary<string, string>
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- scheduling.max_failures_per_hour - (Required) Maximum number of times per hour a driver may be restarted as a result of the driver exiting with a non-zero code before the job is reported failed.
- scheduling.max_failures_total - (Required) Maximum number of times in total a driver may be restarted as a result of the driver exiting with a non-zero code before the job is reported failed.
- ForceDelete bool
- By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.
- HadoopConfig JobHadoopConfig
- The config of the Hadoop job.
- HiveConfig JobHiveConfig
- The config of the Hive job.
- Labels Dictionary<string, string>
- The list of labels (key/value pairs) to add to the job. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
- PigConfig JobPigConfig
- The config of the Pig job.
- Placement JobPlacement
- The config of job placement.
- PrestoConfig JobPrestoConfig
- The config of the Presto job.
- Project string
- The project in which the cluster can be found and against which jobs are subsequently run. If it is not provided, the provider project is used.
- PulumiLabels Dictionary<string, string>
- The combination of labels configured directly on the resource and default labels configured on the provider.
- PysparkConfig JobPysparkConfig
- The config of the PySpark job.
- Reference JobReference
- The reference of the job.
- Region string
- The Cloud Dataproc region. This essentially determines which clusters are available for this job to be submitted to. If not specified, defaults to global.
- Scheduling JobScheduling
- Optional. Job scheduling configuration.
- SparkConfig JobSparkConfig
- The config of the Spark job.
- SparksqlConfig JobSparksqlConfig
- The config of the SparkSQL job.
- Statuses List<JobStatus>
- The status of the job.
- DriverControlsFilesUri string
- If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.
- DriverOutputResourceUri string
- A URI pointing to the location of the stdout of the job's driver program.
- EffectiveLabels map[string]string
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- scheduling.max_failures_per_hour - (Required) Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
- scheduling.max_failures_total - (Required) Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
 
- ForceDelete bool
- By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.
- HadoopConfig JobHadoop Config Args 
- The config of Hadoop job
- HiveConfig JobHive Config Args 
- The config of hive job
- Labels map[string]string
- The list of labels (key/value pairs) to add to the job. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
- PigConfig JobPig Config Args 
- The config of pig job.
- Placement
JobPlacement Args 
- The config of job placement.
- PrestoConfig JobPresto Config Args 
- The config of presto job
- Project string
- The project in which the cluster can be found and jobs subsequently run against. If it is not provided, the provider project is used.
- PulumiLabels map[string]string
- The combination of labels configured directly on the resource and default labels configured on the provider.
- PysparkConfig JobPyspark Config Args 
- The config of pySpark job.
- Reference
JobReference Args 
- The reference of the job
- Region string
- The Cloud Dataproc region. This essentially determines which clusters are available
for this job to be submitted to. If not specified, defaults to global.
- Scheduling
JobScheduling Args 
- Optional. Job scheduling configuration.
- SparkConfig JobSpark Config Args 
- The config of the Spark job.
- SparksqlConfig JobSparksql Config Args 
- The config of SparkSql job
- Statuses
[]JobStatus Args 
- The status of the job.
- driverControlsFilesUri String
- If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.
- driverOutputResourceUri String
- A URI pointing to the location of the stdout of the job's driver program.
- effectiveLabels Map<String,String>
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- scheduling.max_failures_per_hour - (Required) Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
- scheduling.max_failures_total - (Required) Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
 
- forceDelete Boolean
- By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.
- hadoopConfig JobHadoop Config 
- The config of Hadoop job
- hiveConfig JobHive Config 
- The config of hive job
- labels Map<String,String>
- The list of labels (key/value pairs) to add to the job. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
- pigConfig JobPig Config 
- The config of pig job.
- placement
JobPlacement 
- The config of job placement.
- prestoConfig JobPresto Config 
- The config of presto job
- project String
- The project in which the cluster can be found and jobs subsequently run against. If it is not provided, the provider project is used.
- pulumiLabels Map<String,String>
- The combination of labels configured directly on the resource and default labels configured on the provider.
- pysparkConfig JobPyspark Config 
- The config of pySpark job.
- reference
JobReference 
- The reference of the job
- region String
- The Cloud Dataproc region. This essentially determines which clusters are available
for this job to be submitted to. If not specified, defaults to global.
- scheduling
JobScheduling 
- Optional. Job scheduling configuration.
- sparkConfig JobSpark Config 
- The config of the Spark job.
- sparksqlConfig JobSparksql Config 
- The config of SparkSql job
- statuses
List<JobStatus> 
- The status of the job.
- driverControlsFilesUri string
- If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.
- driverOutputResourceUri string
- A URI pointing to the location of the stdout of the job's driver program.
- effectiveLabels {[key: string]: string}
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- scheduling.max_failures_per_hour - (Required) Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
- scheduling.max_failures_total - (Required) Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
 
- forceDelete boolean
- By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.
- hadoopConfig JobHadoop Config 
- The config of Hadoop job
- hiveConfig JobHive Config 
- The config of hive job
- labels {[key: string]: string}
- The list of labels (key/value pairs) to add to the job. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
- pigConfig JobPig Config 
- The config of pig job.
- placement
JobPlacement 
- The config of job placement.
- prestoConfig JobPresto Config 
- The config of presto job
- project string
- The project in which the cluster can be found and jobs subsequently run against. If it is not provided, the provider project is used.
- pulumiLabels {[key: string]: string}
- The combination of labels configured directly on the resource and default labels configured on the provider.
- pysparkConfig JobPyspark Config 
- The config of pySpark job.
- reference
JobReference 
- The reference of the job
- region string
- The Cloud Dataproc region. This essentially determines which clusters are available
for this job to be submitted to. If not specified, defaults to global.
- scheduling
JobScheduling 
- Optional. Job scheduling configuration.
- sparkConfig JobSpark Config 
- The config of the Spark job.
- sparksqlConfig JobSparksql Config 
- The config of SparkSql job
- statuses
JobStatus[] 
- The status of the job.
- driver_controls_files_uri str
- If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.
- driver_output_resource_uri str
- A URI pointing to the location of the stdout of the job's driver program.
- effective_labels Mapping[str, str]
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- scheduling.max_failures_per_hour - (Required) Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
- scheduling.max_failures_total - (Required) Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
 
- force_delete bool
- By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.
- hadoop_config JobHadoop Config Args 
- The config of Hadoop job
- hive_config JobHive Config Args 
- The config of hive job
- labels Mapping[str, str]
- The list of labels (key/value pairs) to add to the job. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
- pig_config JobPig Config Args 
- The config of pig job.
- placement
JobPlacement Args 
- The config of job placement.
- presto_config JobPresto Config Args 
- The config of presto job
- project str
- The project in which the cluster can be found and jobs subsequently run against. If it is not provided, the provider project is used.
- pulumi_labels Mapping[str, str]
- The combination of labels configured directly on the resource and default labels configured on the provider.
- pyspark_config JobPyspark Config Args 
- The config of pySpark job.
- reference
JobReference Args 
- The reference of the job
- region str
- The Cloud Dataproc region. This essentially determines which clusters are available
for this job to be submitted to. If not specified, defaults to global.
- scheduling
JobScheduling Args 
- Optional. Job scheduling configuration.
- spark_config JobSpark Config Args 
- The config of the Spark job.
- sparksql_config JobSparksql Config Args 
- The config of SparkSql job
- statuses
Sequence[JobStatus Args] 
- The status of the job.
- driverControlsFilesUri String
- If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same location as driver_output_uri.
- driverOutputResourceUri String
- A URI pointing to the location of the stdout of the job's driver program.
- effectiveLabels Map<String>
- All of the labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- scheduling.max_failures_per_hour - (Required) Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
- scheduling.max_failures_total - (Required) Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
 
- forceDelete Boolean
- By default, you can only delete inactive jobs within Dataproc. Setting this to true, and calling destroy, will ensure that the job is first cancelled before issuing the delete.
- hadoopConfig Property Map
- The config of Hadoop job
- hiveConfig Property Map
- The config of hive job
- labels Map<String>
- The list of labels (key/value pairs) to add to the job. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field 'effective_labels' for all of the labels present on the resource.
- pigConfig Property Map
- The config of pig job.
- placement Property Map
- The config of job placement.
- prestoConfig Property Map
- The config of presto job
- project String
- The project in which the cluster can be found and jobs subsequently run against. If it is not provided, the provider project is used.
- pulumiLabels Map<String>
- The combination of labels configured directly on the resource and default labels configured on the provider.
- pysparkConfig Property Map
- The config of pySpark job.
- reference Property Map
- The reference of the job
- region String
- The Cloud Dataproc region. This essentially determines which clusters are available
for this job to be submitted to. If not specified, defaults to global.
- scheduling Property Map
- Optional. Job scheduling configuration.
- sparkConfig Property Map
- The config of the Spark job.
- sparksqlConfig Property Map
- The config of SparkSql job
- statuses List<Property Map>
- The status of the job.
Supporting Types
JobHadoopConfig, JobHadoopConfigArgs      
- ArchiveUris List<string>
- HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip.
- Args List<string>
- The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
- FileUris List<string>
- HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.
- JarFileUris List<string>
- HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
- LoggingConfig JobHadoop Config Logging Config
- The runtime logging config of the job
- MainClass string
- The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris. Conflicts with main_jar_file_uri
- MainJarFileUri string
- The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'. Conflicts with main_class
- Properties Dictionary<string, string>
- A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site and classes in user code.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
 
- ArchiveUris []string
- HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip.
- Args []string
- The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
- FileUris []string
- HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.
- JarFileUris []string
- HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
- LoggingConfig JobHadoop Config Logging Config
- The runtime logging config of the job
- MainClass string
- The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris. Conflicts with main_jar_file_uri
- MainJarFileUri string
- The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'. Conflicts with main_class
- Properties map[string]string
- A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site and classes in user code.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
 
- archiveUris List<String>
- HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip.
- args List<String>
- The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
- fileUris List<String>
- HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.
- jarFileUris List<String>
- HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
- loggingConfig JobHadoop Config Logging Config
- The runtime logging config of the job
- mainClass String
- The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris. Conflicts with main_jar_file_uri
- mainJarFileUri String
- The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'. Conflicts with main_class
- properties Map<String,String>
- A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site and classes in user code.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
 
- archiveUris string[]
- HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip.
- args string[]
- The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
- fileUris string[]
- HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.
- jarFileUris string[]
- HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
- loggingConfig JobHadoop Config Logging Config
- The runtime logging config of the job
- mainClass string
- The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris. Conflicts with main_jar_file_uri
- mainJarFileUri string
- The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'. Conflicts with main_class
- properties {[key: string]: string}
- A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site and classes in user code.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
 
- archive_uris Sequence[str]
- HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip.
- args Sequence[str]
- The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
- file_uris Sequence[str]
- HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.
- jar_file_uris Sequence[str]
- HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
- logging_config JobHadoop Config Logging Config
- The runtime logging config of the job
- main_class str
- The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris. Conflicts with main_jar_file_uri
- main_jar_file_uri str
- The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'. Conflicts with main_class
- properties Mapping[str, str]
- A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site and classes in user code.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
 
- archiveUris List<String>
- HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip.
- args List<String>
- The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an incorrect job submission.
- fileUris List<String>
- HCFS URIs of files to be copied to the working directory of Hadoop drivers and distributed tasks. Useful for naively parallel tasks.
- jarFileUris List<String>
- HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
- loggingConfig Property Map
- The runtime logging config of the job
- mainClass String
- The name of the driver's main class. The jar file containing the class must be in the default CLASSPATH or specified in jar_file_uris. Conflicts with main_jar_file_uri
- mainJarFileUri String
- The HCFS URI of the jar file containing the main class. Examples: 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' 'hdfs:/tmp/test-samples/custom-wordcount.jar' 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar'. Conflicts with main_class
- properties Map<String>
- A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site and classes in user code.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
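As an illustration of these fields, a minimal TypeScript sketch that submits a Hadoop wordcount job; it assumes a gcp.dataproc.Cluster named mycluster as in the earlier examples, and the gs://my-bucket output location is hypothetical.
// Submit an example Hadoop job using the examples jar shipped with the cluster image.
const hadoop = new gcp.dataproc.Job("hadoop", {
    region: mycluster.region,
    forceDelete: true,
    placement: {
        clusterName: mycluster.name,
    },
    hadoopConfig: {
        mainJarFileUri: "file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar",
        args: [
            "wordcount",
            "file:///usr/lib/spark/NOTICE",
            "gs://my-bucket/hadoopjob_output", // hypothetical output location
        ],
        loggingConfig: {
            driverLogLevels: {
                root: "INFO",
            },
        },
    },
});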
 
JobHadoopConfigLoggingConfig, JobHadoopConfigLoggingConfigArgs          
- DriverLogLevels Dictionary<string, string>
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
- DriverLogLevels map[string]string
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
- driverLogLevels Map<String,String>
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
- driverLogLevels {[key: string]: string}
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
- driver_log_levels Mapping[str, str]
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
- driverLogLevels Map<String>
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
JobHiveConfig, JobHiveConfigArgs      
- ContinueOnFailure bool
- Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
- JarFileUris List<string>
- HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes and UDFs.
- Properties Dictionary<string, string>
- A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.
- QueryFileUri string
- HCFS URI of a file containing the Hive script to execute as the job.
Conflicts with query_list
- QueryLists List<string>
- The list of Hive queries or statements to execute as part of the job.
Conflicts with query_file_uri
- ScriptVariables Dictionary<string, string>
- Mapping of query variable names to values (equivalent to the Hive command: SET name="value";).
- ContinueOnFailure bool
- Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
- JarFileUris []string
- HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes and UDFs.
- Properties map[string]string
- A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.
- QueryFileUri string
- HCFS URI of a file containing the Hive script to execute as the job.
Conflicts with query_list
- QueryLists []string
- The list of Hive queries or statements to execute as part of the job.
Conflicts with query_file_uri
- ScriptVariables map[string]string
- Mapping of query variable names to values (equivalent to the Hive command: SET name="value";).
- continueOnFailure Boolean
- Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
- jarFileUris List<String>
- HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes and UDFs.
- properties Map<String,String>
- A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.
- queryFileUri String
- HCFS URI of a file containing the Hive script to execute as the job.
Conflicts with query_list
- queryLists List<String>
- The list of Hive queries or statements to execute as part of the job.
Conflicts with query_file_uri
- scriptVariables Map<String,String>
- Mapping of query variable names to values (equivalent to the Hive command: SET name="value";).
- continueOnFailure boolean
- Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
- jarFileUris string[]
- HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes and UDFs.
- properties {[key: string]: string}
- A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.
- queryFileUri string
- HCFS URI of a file containing the Hive script to execute as the job.
Conflicts with query_list
- queryLists string[]
- The list of Hive queries or statements to execute as part of the job.
Conflicts with query_file_uri
- scriptVariables {[key: string]: string}
- Mapping of query variable names to values (equivalent to the Hive command: SET name="value";).
- continue_on_failure bool
- Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
- jar_file_uris Sequence[str]
- HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes and UDFs.
- properties Mapping[str, str]
- A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.
- query_file_uri str
- HCFS URI of a file containing the Hive script to execute as the job.
Conflicts with query_list
- query_lists Sequence[str]
- The list of Hive queries or statements to execute as part of the job.
Conflicts with query_file_uri
- script_variables Mapping[str, str]
- Mapping of query variable names to values (equivalent to the Hive command: SET name="value";).
- continueOnFailure Boolean
- Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
- jarFileUris List<String>
- HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes and UDFs.
- properties Map<String>
- A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.
- queryFileUri String
- HCFS URI of a file containing the Hive script to execute as the job.
Conflicts with query_list
- queryLists List<String>
- The list of Hive queries or statements to execute as part of the job.
Conflicts with query_file_uri
- scriptVariables Map<String>
- Mapping of query variable names to values (equivalent to the Hive command: SET name="value";).
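As an illustration, a minimal TypeScript sketch of a Hive job built from these fields; it assumes a cluster named mycluster as in the earlier examples, and the gs://my-bucket location is hypothetical.
// Run a short list of Hive statements inline rather than from a query file.
const hive = new gcp.dataproc.Job("hive", {
    region: mycluster.region,
    forceDelete: true,
    placement: {
        clusterName: mycluster.name,
    },
    hiveConfig: {
        queryLists: [
            "DROP TABLE IF EXISTS dprocjob_test",
            "CREATE EXTERNAL TABLE dprocjob_test(bar int) LOCATION 'gs://my-bucket/hive_input/'", // hypothetical bucket
            "SELECT * FROM dprocjob_test WHERE bar > 2",
        ],
        continueOnFailure: false,
    },
});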
JobPigConfig, JobPigConfigArgs      
- ContinueOnFailure bool
- Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
- JarFileUris List<string>
- HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'

- LoggingConfig JobPig Config Logging Config
- The runtime logging config of the job
- Properties Dictionary<string, string>
- A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code.
- QueryFileUri string
- HCFS URI of a file containing the Pig queries to execute as the job.
Conflicts with query_list
- QueryLists List<string>
- The list of Pig queries or statements to execute as part of the job.
Conflicts with query_file_uri
- ScriptVariables Dictionary<string, string>
- Mapping of query variable names to values (equivalent to the Pig command: name=[value]).
- ContinueOnFailure bool
- Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
- JarFileUris []string
- HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'

- LoggingConfig JobPig Config Logging Config
- The runtime logging config of the job
- Properties map[string]string
- A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code.
- QueryFileUri string
- HCFS URI of a file containing the Pig queries to execute as the job.
Conflicts with query_list
- QueryLists []string
- The list of Pig queries or statements to execute as part of the job.
Conflicts with query_file_uri
- ScriptVariables map[string]string
- Mapping of query variable names to values (equivalent to the Pig command: name=[value]).
- continueOnFailure Boolean
- Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
- jarFileUris List<String>
- HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'

- loggingConfig JobPig Config Logging Config
- The runtime logging config of the job
- properties Map<String,String>
- A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code.
- queryFileUri String
- HCFS URI of a file containing the Pig queries to execute as the job.
Conflicts with query_list
- queryLists List<String>
- The list of Pig queries or statements to execute as part of the job.
Conflicts with query_file_uri
- scriptVariables Map<String,String>
- Mapping of query variable names to values (equivalent to the Pig command: name=[value]).
- continueOnFailure boolean
- Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
- jarFileUris string[]
- HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'

- loggingConfig JobPig Config Logging Config
- The runtime logging config of the job
- properties {[key: string]: string}
- A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code.
- queryFileUri string
- HCFS URI of a file containing the Pig queries to execute as the job.
Conflicts with query_list
- queryLists string[]
- The list of Pig queries or statements to execute as part of the job.
Conflicts with query_file_uri
- scriptVariables {[key: string]: string}
- Mapping of query variable names to values (equivalent to the Pig command: name=[value]).
- continue_on_failure bool
- Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
- jar_file_uris Sequence[str]
- HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'

- logging_config JobPig Config Logging Config
- The runtime logging config of the job
- properties Mapping[str, str]
- A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code.
- query_file_uri str
- HCFS URI of a file containing the Pig queries to execute as the job.
Conflicts with query_list
- query_lists Sequence[str]
- The list of Pig queries or statements to execute as part of the job.
Conflicts with query_file_uri
- script_variables Mapping[str, str]
- Mapping of query variable names to values (equivalent to the Pig command: name=[value]).
- continueOnFailure Boolean
- Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
- jarFileUris List<String>
- HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'

- loggingConfig Property Map
- The runtime logging config of the job
- properties Map<String>
- A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code.
- queryFileUri String
- HCFS URI of a file containing the Pig queries to execute as the job.
Conflicts with query_list
- queryLists List<String>
- The list of Pig queries or statements to execute as part of the job.
Conflicts with query_file_uri
- scriptVariables Map<String>
- Mapping of query variable names to values (equivalent to the Pig command: name=[value]).
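As an illustration, a minimal TypeScript sketch of a Pig job built from these fields, assuming a cluster named mycluster as in the earlier examples; the inline script counts words in a file that ships with the Pig installation.
const pig = new gcp.dataproc.Job("pig", {
    region: mycluster.region,
    forceDelete: true,
    placement: {
        clusterName: mycluster.name,
    },
    pigConfig: {
        queryLists: [
            "LNS = LOAD 'file:///usr/lib/pig/LICENSE.txt' AS (line)",
            "WORDS = FOREACH LNS GENERATE FLATTEN(TOKENIZE(line)) AS word",
            "GROUPS = GROUP WORDS BY word",
            "WORD_COUNTS = FOREACH GROUPS GENERATE group, COUNT(WORDS)",
            "DUMP WORD_COUNTS",
        ],
        continueOnFailure: false,
        loggingConfig: {
            driverLogLevels: {
                root: "INFO",
            },
        },
    },
});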
JobPigConfigLoggingConfig, JobPigConfigLoggingConfigArgs          
- DriverLogLevels Dictionary<string, string>
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
- DriverLogLevels map[string]string
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
- driverLogLevels Map<String,String>
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
- driverLogLevels {[key: string]: string}
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
- driver_log_levels Mapping[str, str]
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
- driverLogLevels Map<String>
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
JobPlacement, JobPlacementArgs    
- ClusterName string
- The name of the cluster where the job will be submitted
- ClusterUuid string
- Output-only. A cluster UUID generated by the Cloud Dataproc service when the job is submitted
- ClusterName string
- The name of the cluster where the job will be submitted
- ClusterUuid string
- Output-only. A cluster UUID generated by the Cloud Dataproc service when the job is submitted
- clusterName String
- The name of the cluster where the job will be submitted
- clusterUuid String
- Output-only. A cluster UUID generated by the Cloud Dataproc service when the job is submitted
- clusterName string
- The name of the cluster where the job will be submitted
- clusterUuid string
- Output-only. A cluster UUID generated by the Cloud Dataproc service when the job is submitted
- cluster_name str
- The name of the cluster where the job will be submitted
- cluster_uuid str
- Output-only. A cluster UUID generated by the Cloud Dataproc service when the job is submitted
- clusterName String
- The name of the cluster where the job will be submitted
- clusterUuid String
- Output-only. A cluster UUID generated by the Cloud Dataproc service when the job is submitted
JobPrestoConfig, JobPrestoConfigArgs      
- ClientTags List<string>
- Presto client tags to attach to this query.
- ContinueOnFailure bool
- Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
- LoggingConfig JobPresto Config Logging Config
- The runtime logging config of the job
- OutputFormat string
- The format in which query output will be displayed. See the Presto documentation for supported output formats.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'

- Properties Dictionary<string, string>
- A mapping of property names to values. Used to set Presto session properties. Equivalent to using the --session flag in the Presto CLI.
- QueryFileUri string
- The HCFS URI of the script that contains SQL queries.
Conflicts with query_list
- QueryLists List<string>
- The list of SQL queries or statements to execute as part of the job.
Conflicts with query_file_uri
- ClientTags []string
- Presto client tags to attach to this query.
- ContinueOnFailure bool
- Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
- LoggingConfig JobPresto Config Logging Config
- The runtime logging config of the job
- OutputFormat string
- The format in which query output will be displayed. See the Presto documentation for supported output formats.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'

- Properties map[string]string
- A mapping of property names to values. Used to set Presto session properties. Equivalent to using the --session flag in the Presto CLI.
- QueryFileUri string
- The HCFS URI of the script that contains SQL queries.
Conflicts with query_list
- QueryLists []string
- The list of SQL queries or statements to execute as part of the job.
Conflicts with query_file_uri
- clientTags List<String>
- Presto client tags to attach to this query.
- continueOnFailure Boolean
- Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
- loggingConfig JobPresto Config Logging Config
- The runtime logging config of the job
- outputFormat String
- The format in which query output will be displayed. See the Presto documentation for supported output formats.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'

- properties Map<String,String>
- A mapping of property names to values. Used to set Presto session properties. Equivalent to using the --session flag in the Presto CLI.
- queryFileUri String
- The HCFS URI of the script that contains SQL queries.
Conflicts with query_list
- queryLists List<String>
- The list of SQL queries or statements to execute as part of the job.
Conflicts with query_file_uri
- clientTags string[]
- Presto client tags to attach to this query.
- continueOnFailure boolean
- Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
- loggingConfig JobPresto Config Logging Config
- The runtime logging config of the job
- outputFormat string
- The format in which query output will be displayed. See the Presto documentation for supported output formats.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'

- properties {[key: string]: string}
- A mapping of property names to values. Used to set Presto session properties. Equivalent to using the --session flag in the Presto CLI.
- queryFileUri string
- The HCFS URI of the script that contains SQL queries.
Conflicts with query_list
- queryLists string[]
- The list of SQL queries or statements to execute as part of the job.
Conflicts with query_file_uri
- client_tags Sequence[str]
- Presto client tags to attach to this query.
- continue_on_failure bool
- Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
- logging_config JobPresto Config Logging Config
- The runtime logging config of the job
- output_format str
- The format in which query output will be displayed. See the Presto documentation for supported output formats.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'

- properties Mapping[str, str]
- A mapping of property names to values. Used to set Presto session properties. Equivalent to using the --session flag in the Presto CLI.
- query_file_uri str
- The HCFS URI of the script that contains SQL queries.
Conflicts with query_list
- query_lists Sequence[str]
- The list of SQL queries or statements to execute as part of the job.
Conflicts with query_file_uri
- clientTags List<String>
- Presto client tags to attach to this query.
- continueOnFailure Boolean
- Whether to continue executing queries if a query fails. Setting to true can be useful when executing independent parallel queries. Defaults to false.
- loggingConfig Property Map
- The runtime logging config of the job
- outputFormat String
- The format in which query output will be displayed. See the Presto documentation for supported output formats.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'

- properties Map<String>
- A mapping of property names to values. Used to set Presto session properties. Equivalent to using the --session flag in the Presto CLI.
- queryFileUri String
- The HCFS URI of the script that contains SQL queries.
Conflicts with query_list
- queryLists List<String>
- The list of SQL queries or statements to execute as part of the job.
Conflicts with query_file_uri
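As an illustration, a minimal TypeScript sketch of a Presto job; it assumes a cluster named mycluster that has the Presto optional component enabled, and the clientTags and outputFormat values shown are illustrative only.
const presto = new gcp.dataproc.Job("presto", {
    region: mycluster.region,
    forceDelete: true,
    placement: {
        clusterName: mycluster.name,
    },
    prestoConfig: {
        queryLists: ["SELECT * FROM system.metadata.table_properties"],
        clientTags: ["pulumi-example"], // illustrative tag
        outputFormat: "CSV",
        continueOnFailure: false,
        loggingConfig: {
            driverLogLevels: {
                root: "INFO",
            },
        },
    },
});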
JobPrestoConfigLoggingConfig, JobPrestoConfigLoggingConfigArgs          
- DriverLogLevels Dictionary<string, string>
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
- DriverLogLevels map[string]string
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
- driverLogLevels Map<String,String>
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
- driverLogLevels {[key: string]: string}
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
- driver_log_levels Mapping[str, str]
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
- driverLogLevels Map<String>
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
JobPysparkConfig, JobPysparkConfigArgs      
- MainPythonFileUri string
- The HCFS URI of the main Python file to use as the driver. Must be a .py file.
- ArchiveUris List<string>
- HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip.
- Args List<string>
- The arguments to pass to the driver.
- FileUris List<string>
- HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks.
- JarFileUris List<string>
- HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
- LoggingConfig JobPyspark Config Logging Config
- The runtime logging config of the job
- Properties Dictionary<string, string>
- A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'

- PythonFileUris List<string>
- HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
- MainPythonFileUri string
- The HCFS URI of the main Python file to use as the driver. Must be a .py file.
- ArchiveUris []string
- HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip.
- Args []string
- The arguments to pass to the driver.
- FileUris []string
- HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks.
- JarFileUris []string
- HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
- LoggingConfig JobPyspark Config Logging Config
- The runtime logging config of the job
- Properties map[string]string
- A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'

- PythonFileUris []string
- HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
- mainPythonFileUri String
- The HCFS URI of the main Python file to use as the driver. Must be a .py file.
- archiveUris List<String>
- HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip.
- args List<String>
- The arguments to pass to the driver.
- fileUris List<String>
- HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks.
- jarFileUris List<String>
- HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
- loggingConfig JobPyspark Config Logging Config
- The runtime logging config of the job
- properties Map<String,String>
- A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'

- pythonFileUris List<String>
- HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
- mainPythonFileUri string
- The HCFS URI of the main Python file to use as the driver. Must be a .py file.
- archiveUris string[]
- HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip.
- args string[]
- The arguments to pass to the driver.
- fileUris string[]
- HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks.
- jarFileUris string[]
- HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
- loggingConfig JobPyspark Config Logging Config
- The runtime logging config of the job
- properties {[key: string]: string}
- A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'

- pythonFileUris string[]
- HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
- main_python_file_uri str
- The HCFS URI of the main Python file to use as the driver. Must be a .py file.
- archive_uris Sequence[str]
- HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip.
- args Sequence[str]
- The arguments to pass to the driver.
- file_uris Sequence[str]
- HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks.
- jar_file_uris Sequence[str]
- HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
- logging_config JobPyspark Config Logging Config
- The runtime logging config of the job
- properties Mapping[str, str]
- A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'

- python_file_uris Sequence[str]
- HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
- mainPythonFileUri String
- The HCFS URI of the main Python file to use as the driver. Must be a .py file.
- archiveUris List<String>
- HCFS URIs of archives to be extracted in the working directory of .jar, .tar, .tar.gz, .tgz, and .zip.
- args List<String>
- The arguments to pass to the driver.
- fileUris List<String>
- HCFS URIs of files to be copied to the working directory of Python drivers and distributed tasks. Useful for naively parallel tasks.
- jarFileUris List<String>
- HCFS URIs of jar files to add to the CLASSPATHs of the Python driver and tasks.
- loggingConfig Property Map
- The runtime logging config of the job
- properties Map<String>
- A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'

- pythonFileUris List<String>
- HCFS file URIs of Python files to pass to the PySpark framework. Supported file types: .py, .egg, and .zip.
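As an illustration of the optional dependency and argument fields, a minimal TypeScript sketch of a PySpark job; it assumes a cluster named mycluster as in the earlier examples, and every gs://my-bucket path is hypothetical.
const pysparkWithDeps = new gcp.dataproc.Job("pyspark-with-deps", {
    region: mycluster.region,
    placement: {
        clusterName: mycluster.name,
    },
    pysparkConfig: {
        mainPythonFileUri: "gs://my-bucket/jobs/main.py",    // hypothetical driver script
        pythonFileUris: ["gs://my-bucket/jobs/helpers.zip"], // hypothetical dependencies
        args: ["--input", "gs://my-bucket/input/"],          // hypothetical arguments
        loggingConfig: {
            driverLogLevels: {
                root: "WARN",
            },
        },
    },
});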
JobPysparkConfigLoggingConfig, JobPysparkConfigLoggingConfigArgs          
- DriverLogLevels Dictionary<string, string>
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
- DriverLogLevels map[string]string
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
- driverLogLevels Map<String,String>
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
- driverLogLevels {[key: string]: string}
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
- driver_log_levels Mapping[str, str]
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
- driverLogLevels Map<String>
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
JobReference, JobReferenceArgs    
- JobId string
- The job ID, which must be unique within the project. The job ID is generated by the server upon job submission or provided by the user as a means to perform retries without creating duplicate jobs
- JobId string
- The job ID, which must be unique within the project. The job ID is generated by the server upon job submission or provided by the user as a means to perform retries without creating duplicate jobs
- jobId String
- The job ID, which must be unique within the project. The job ID is generated by the server upon job submission or provided by the user as a means to perform retries without creating duplicate jobs
- jobId string
- The job ID, which must be unique within the project. The job ID is generated by the server upon job submission or provided by the user as a means to perform retries without creating duplicate jobs
- job_id str
- The job ID, which must be unique within the project. The job ID is generated by the server upon job submission or provided by the user as a means to perform retries without creating duplicate jobs
- jobId String
- The job ID, which must be unique within the project. The job ID is generated by the server upon job submission or provided by the user as a means to perform retries without creating duplicate jobs
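The following is a minimal TypeScript sketch of supplying your own job ID through the reference block, assuming a placeholder cluster name and jar path; retrying a submission with the same ID then does not create a duplicate job.
import * as gcp from "@pulumi/gcp";

// Hypothetical job with a caller-supplied job ID for safe retries.
const withReference = new gcp.dataproc.Job("with-reference", {
    region: "us-central1",
    placement: {
        clusterName: "my-existing-cluster",            // placeholder cluster name
    },
    reference: {
        jobId: "nightly-report-2024-01-01",            // must be unique within the project
    },
    sparkConfig: {
        mainJarFileUri: "gs://my-bucket/jobs/report.jar", // placeholder URI
    },
});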
JobScheduling, JobSchedulingArgs    
- MaxFailuresPerHour int
- Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
- MaxFailuresTotal int
- Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
- MaxFailuresPerHour int
- Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
- MaxFailuresTotal int
- Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
- maxFailuresPerHour Integer
- Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
- maxFailuresTotal Integer
- Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
- maxFailuresPerHour number
- Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
- maxFailuresTotal number
- Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
- max_failures_per_hour int
- Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
- max_failures_total int
- Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
- maxFailuresPerHour Number
- Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
- maxFailuresTotal Number
- Maximum number of times in total a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.
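A minimal TypeScript sketch of the scheduling block, using placeholder cluster and file names; the two limits bound how often the driver may be restarted before the job is reported as failed.
import * as gcp from "@pulumi/gcp";

// Hypothetical job that tolerates a bounded number of driver restarts.
const restartable = new gcp.dataproc.Job("restartable", {
    region: "us-central1",
    placement: {
        clusterName: "my-existing-cluster",                    // placeholder cluster name
    },
    scheduling: {
        maxFailuresPerHour: 1,
        maxFailuresTotal: 5,
    },
    pysparkConfig: {
        mainPythonFileUri: "gs://my-bucket/jobs/streaming.py", // placeholder URI
    },
});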
JobSparkConfig, JobSparkConfigArgs      
- ArchiveUris List<string>
- HCFS URIs of archives to be extracted into the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- Args List<string>
- The arguments to pass to the driver.
- FileUris List<string>
- HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks.
- JarFileUris List<string>
- HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
- LoggingConfig JobSparkConfigLoggingConfig
- The runtime logging config of the job
- MainClass string
- The class containing the main method of the driver. Must be in a
provided jar or a jar that is already on the classpath. Conflicts with main_jar_file_uri
- MainJarFileUri string
- The HCFS URI of the jar file containing
the driver jar. Conflicts with main_class
- Properties Dictionary<string, string>
- A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
 
- ArchiveUris []string
- HCFS URIs of archives to be extracted into the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- Args []string
- The arguments to pass to the driver.
- FileUris []string
- HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks.
- JarFileUris []string
- HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
- LoggingConfig JobSparkConfigLoggingConfig
- The runtime logging config of the job
- MainClass string
- The class containing the main method of the driver. Must be in a
provided jar or a jar that is already on the classpath. Conflicts with main_jar_file_uri
- MainJarFileUri string
- The HCFS URI of the jar file containing
the driver jar. Conflicts with main_class
- Properties map[string]string
- A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
 
- archiveUris List<String>
- HCFS URIs of archives to be extracted into the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- args List<String>
- The arguments to pass to the driver.
- fileUris List<String>
- HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks.
- jarFileUris List<String>
- HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
- loggingConfig JobSparkConfigLoggingConfig
- The runtime logging config of the job
- mainClass String
- The class containing the main method of the driver. Must be in a
provided jar or a jar that is already on the classpath. Conflicts with main_jar_file_uri
- mainJarFileUri String
- The HCFS URI of the jar file containing
the driver jar. Conflicts with main_class
- properties Map<String,String>
- A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
 
- archiveUris string[]
- HCFS URIs of archives to be extracted into the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- args string[]
- The arguments to pass to the driver.
- fileUris string[]
- HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks.
- jarFileUris string[]
- HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
- loggingConfig JobSparkConfigLoggingConfig
- The runtime logging config of the job
- mainClass string
- The class containing the main method of the driver. Must be in a
provided jar or a jar that is already on the classpath. Conflicts with main_jar_file_uri
- mainJarFileUri string
- The HCFS URI of the jar file containing
the driver jar. Conflicts with main_class
- properties {[key: string]: string}
- A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
 
- archive_uris Sequence[str]
- HCFS URIs of archives to be extracted into the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- args Sequence[str]
- The arguments to pass to the driver.
- file_uris Sequence[str]
- HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks.
- jar_file_uris Sequence[str]
- HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
- logging_config JobSparkConfigLoggingConfig
- The runtime logging config of the job
- main_class str
- The class containing the main method of the driver. Must be in a
provided jar or a jar that is already on the classpath. Conflicts with main_jar_file_uri
- main_jar_file_uri str
- The HCFS URI of the jar file containing
the driver jar. Conflicts with main_class
- properties Mapping[str, str]
- A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
 
- archiveUris List<String>
- HCFS URIs of archives to be extracted into the working directory. Supported file types: .jar, .tar, .tar.gz, .tgz, and .zip.
- args List<String>
- The arguments to pass to the driver.
- fileUris List<String>
- HCFS URIs of files to be copied to the working directory of Spark drivers and distributed tasks. Useful for naively parallel tasks.
- jarFileUris List<String>
- HCFS URIs of jar files to add to the CLASSPATHs of the Spark driver and tasks.
- loggingConfig Property Map
- The runtime logging config of the job
- mainClass String
- The class containing the main method of the driver. Must be in a
provided jar or a jar that is already on the classpath. Conflicts with main_jar_file_uri
- mainJarFileUri String
- The HCFS URI of the jar file containing
the driver jar. Conflicts with main_class
- properties Map<String>
- A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Cloud Dataproc API may be overwritten. Can include properties set in /etc/spark/conf/spark-defaults.conf and classes in user code.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
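The following TypeScript sketch shows a hypothetical sparkConfig that uses mainJarFileUri rather than mainClass (the two conflict with each other); all gs:// paths and the cluster name are placeholders.
import * as gcp from "@pulumi/gcp";

// Hypothetical Spark job driven by the jar supplied in mainJarFileUri.
const sparkFromJar = new gcp.dataproc.Job("spark-from-jar", {
    region: "us-central1",
    placement: {
        clusterName: "my-existing-cluster",                     // placeholder cluster name
    },
    sparkConfig: {
        mainJarFileUri: "gs://my-bucket/jobs/etl-assembly.jar", // placeholder URI; conflicts with mainClass
        args: ["--input", "gs://my-bucket/raw/", "--output", "gs://my-bucket/curated/"],
        fileUris: ["gs://my-bucket/jobs/lookup.csv"],           // copied to the working directory
        properties: {
            "spark.executor.memory": "4g",
        },
    },
});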
 
JobSparkConfigLoggingConfig, JobSparkConfigLoggingConfigArgs          
- DriverLogLevels Dictionary<string, string>
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
- DriverLogLevels map[string]string
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
- driverLogLevels Map<String,String>
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
- driverLogLevels {[key: string]: string}
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
- driver_log_levels Mapping[str, str]
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
- driverLogLevels Map<String>
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
JobSparksqlConfig, JobSparksqlConfigArgs      
- JarFileUris List<string>
- HCFS URIs of jar files to be added to the Spark CLASSPATH.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'

- LoggingConfig JobSparksqlConfigLoggingConfig
- The runtime logging config of the job
- Properties Dictionary<string, string>
- A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Cloud Dataproc API may be overwritten.
- QueryFileUri string
- The HCFS URI of the script that contains SQL queries.
Conflicts with query_list
- QueryLists List<string>
- The list of SQL queries or statements to execute as part of the job.
Conflicts with query_file_uri
- ScriptVariables Dictionary<string, string>
- Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
- JarFileUris []string
- HCFS URIs of jar files to be added to the Spark CLASSPATH.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'

- LoggingConfig JobSparksqlConfigLoggingConfig
- The runtime logging config of the job
- Properties map[string]string
- A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Cloud Dataproc API may be overwritten.
- QueryFileUri string
- The HCFS URI of the script that contains SQL queries.
Conflicts with query_list
- QueryLists []string
- The list of SQL queries or statements to execute as part of the job.
Conflicts with query_file_uri
- ScriptVariables map[string]string
- Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
- jarFileUris List<String>
- HCFS URIs of jar files to be added to the Spark CLASSPATH.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'

- loggingConfig JobSparksqlConfigLoggingConfig
- The runtime logging config of the job
- properties Map<String,String>
- A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Cloud Dataproc API may be overwritten.
- queryFileUri String
- The HCFS URI of the script that contains SQL queries.
Conflicts with query_list
- queryLists List<String>
- The list of SQL queries or statements to execute as part of the job.
Conflicts with query_file_uri
- scriptVariables Map<String,String>
- Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
- jarFileUris string[]
- HCFS URIs of jar files to be added to the Spark CLASSPATH.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'

- loggingConfig JobSparksqlConfigLoggingConfig
- The runtime logging config of the job
- properties {[key: string]: string}
- A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Cloud Dataproc API may be overwritten.
- queryFileUri string
- The HCFS URI of the script that contains SQL queries.
Conflicts with query_list
- queryLists string[]
- The list of SQL queries or statements to execute as part of the job.
Conflicts with query_file_uri
- scriptVariables {[key: string]: string}
- Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
- jar_file_uris Sequence[str]
- HCFS URIs of jar files to be added to the Spark CLASSPATH.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'

- logging_config JobSparksqlConfigLoggingConfig
- The runtime logging config of the job
- properties Mapping[str, str]
- A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Cloud Dataproc API may be overwritten.
- query_file_uri str
- The HCFS URI of the script that contains SQL queries.
Conflicts with query_list
- query_lists Sequence[str]
- The list of SQL queries or statements to execute as part of the job.
Conflicts with query_file_uri
- script_variables Mapping[str, str]
- Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
- jarFileUris List<String>
- HCFS URIs of jar files to be added to the Spark CLASSPATH.
- logging_config.driver_log_levels - (Required) The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'

- loggingConfig Property Map
- The runtime logging config of the job
- properties Map<String>
- A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Cloud Dataproc API may be overwritten.
- queryFileUri String
- The HCFS URI of the script that contains SQL queries.
Conflicts with query_list
- queryLists List<String>
- The list of SQL queries or statements to execute as part of the job.
Conflicts with query_file_uri
- scriptVariables Map<String>
- Mapping of query variable names to values (equivalent to the Spark SQL command: SET name="value";).
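A hypothetical TypeScript sketch of sparksqlConfig with inline statements and a script variable; the table and cluster names are placeholders.
import * as gcp from "@pulumi/gcp";

// Hypothetical Spark SQL job: inline statements plus a substituted variable.
const sparkSql = new gcp.dataproc.Job("spark-sql", {
    region: "us-central1",
    placement: {
        clusterName: "my-existing-cluster",  // placeholder cluster name
    },
    sparksqlConfig: {
        queryLists: [
            "SHOW DATABASES;",
            "SELECT COUNT(*) FROM logs.events;", // placeholder table
        ],
        scriptVariables: {
            partition_date: "2024-01-01",        // injected as: SET partition_date="2024-01-01";
        },
    },
});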
JobSparksqlConfigLoggingConfig, JobSparksqlConfigLoggingConfigArgs          
- DriverLogLevels Dictionary<string, string>
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
- DriverLogLevels map[string]string
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
- driverLogLevels Map<String,String>
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
- driverLogLevels {[key: string]: string}
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
- driver_log_levels Mapping[str, str]
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
- driverLogLevels Map<String>
- Optional. The per-package log levels for the driver. This may include 'root' package name to configure rootLogger. Examples: 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'.
JobStatus, JobStatusArgs    
- Details string
- Optional job state details, such as an error description if the state is ERROR.
- State string
- A state message specifying the overall job state.
- StateStartTime string
- The time when this state was entered.
- Substate string
- Additional state information, which includes status reported by the agent.
- Details string
- Optional job state details, such as an error description if the state is ERROR.
- State string
- A state message specifying the overall job state.
- StateStartTime string
- The time when this state was entered.
- Substate string
- Additional state information, which includes status reported by the agent.
- details String
- Optional job state details, such as an error description if the state is ERROR.
- state String
- A state message specifying the overall job state.
- stateStartTime String
- The time when this state was entered.
- substate String
- Additional state information, which includes status reported by the agent.
- details string
- Optional job state details, such as an error description if the state is ERROR.
- state string
- A state message specifying the overall job state.
- stateStartTime string
- The time when this state was entered.
- substate string
- Additional state information, which includes status reported by the agent.
- details str
- Optional job state details, such as an error description if the state is ERROR.
- state str
- A state message specifying the overall job state.
- state_start_time str
- The time when this state was entered.
- substate str
- Additional state information, which includes status reported by the agent.
- details String
- Optional job state details, such as an error description if the state is ERROR.
- state String
- A state message specifying the overall job state.
- stateStartTime String
- The time when this state was entered.
- substate String
- Additional state information, which includes status reported by the agent.
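Since statuses is an output list, the additional fields above can be surfaced with apply; a short TypeScript sketch, assuming a Job resource such as the hypothetical sparkFromJar defined in an earlier sketch:
// Surface the richer status fields of a job once it has been submitted.
export const sparkJobStateDetails = sparkFromJar.statuses.apply(s => s[0].details);
export const sparkJobSubstate = sparkFromJar.statuses.apply(s => s[0].substate);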
Import
This resource does not support import.
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- Google Cloud (GCP) Classic pulumi/pulumi-gcp
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the google-beta Terraform Provider.