aws.sagemaker.DataQualityJobDefinition
Explore with Pulumi AI
Provides a SageMaker data quality job definition resource.
Example Usage
Basic usage:
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
const test = new aws.sagemaker.DataQualityJobDefinition("test", {
    name: "my-data-quality-job-definition",
    dataQualityAppSpecification: {
        imageUri: monitor.registryPath,
    },
    dataQualityJobInput: {
        endpointInput: {
            endpointName: myEndpoint.name,
        },
    },
    dataQualityJobOutputConfig: {
        monitoringOutputs: {
            s3Output: {
                s3Uri: `https://${myBucket.bucketRegionalDomainName}/output`,
            },
        },
    },
    jobResources: {
        clusterConfig: {
            instanceCount: 1,
            instanceType: "ml.t3.medium",
            volumeSizeInGb: 20,
        },
    },
    roleArn: myRole.arn,
});
import pulumi
import pulumi_aws as aws
test = aws.sagemaker.DataQualityJobDefinition("test",
    name="my-data-quality-job-definition",
    data_quality_app_specification={
        "image_uri": monitor["registryPath"],
    },
    data_quality_job_input={
        "endpoint_input": {
            "endpoint_name": my_endpoint["name"],
        },
    },
    data_quality_job_output_config={
        "monitoring_outputs": {
            "s3_output": {
                "s3_uri": f"https://{my_bucket['bucketRegionalDomainName']}/output",
            },
        },
    },
    job_resources={
        "cluster_config": {
            "instance_count": 1,
            "instance_type": "ml.t3.medium",
            "volume_size_in_gb": 20,
        },
    },
    role_arn=my_role["arn"])
package main
import (
	"fmt"
	"github.com/pulumi/pulumi-aws/sdk/v6/go/aws/sagemaker"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := sagemaker.NewDataQualityJobDefinition(ctx, "test", &sagemaker.DataQualityJobDefinitionArgs{
			Name: pulumi.String("my-data-quality-job-definition"),
			DataQualityAppSpecification: &sagemaker.DataQualityJobDefinitionDataQualityAppSpecificationArgs{
				ImageUri: pulumi.Any(monitor.RegistryPath),
			},
			DataQualityJobInput: &sagemaker.DataQualityJobDefinitionDataQualityJobInputArgs{
				EndpointInput: &sagemaker.DataQualityJobDefinitionDataQualityJobInputEndpointInputArgs{
					EndpointName: pulumi.Any(myEndpoint.Name),
				},
			},
			DataQualityJobOutputConfig: &sagemaker.DataQualityJobDefinitionDataQualityJobOutputConfigArgs{
				MonitoringOutputs: &sagemaker.DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsArgs{
					S3Output: sagemaker.DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3OutputArgs{
						S3Uri: pulumi.Sprintf("https://%v/output", myBucket.BucketRegionalDomainName),
					},
				},
			},
			JobResources: &sagemaker.DataQualityJobDefinitionJobResourcesArgs{
				ClusterConfig: &sagemaker.DataQualityJobDefinitionJobResourcesClusterConfigArgs{
					InstanceCount:  pulumi.Int(1),
					InstanceType:   pulumi.String("ml.t3.medium"),
					VolumeSizeInGb: pulumi.Int(20),
				},
			},
			RoleArn: pulumi.Any(myRole.Arn),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;
return await Deployment.RunAsync(() => 
{
    var test = new Aws.Sagemaker.DataQualityJobDefinition("test", new()
    {
        Name = "my-data-quality-job-definition",
        DataQualityAppSpecification = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityAppSpecificationArgs
        {
            ImageUri = monitor.RegistryPath,
        },
        DataQualityJobInput = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobInputArgs
        {
            EndpointInput = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobInputEndpointInputArgs
            {
                EndpointName = myEndpoint.Name,
            },
        },
        DataQualityJobOutputConfig = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobOutputConfigArgs
        {
            MonitoringOutputs = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsArgs
            {
                S3Output = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3OutputArgs
                {
                    S3Uri = $"https://{myBucket.BucketRegionalDomainName}/output",
                },
            },
        },
        JobResources = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionJobResourcesArgs
        {
            ClusterConfig = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionJobResourcesClusterConfigArgs
            {
                InstanceCount = 1,
                InstanceType = "ml.t3.medium",
                VolumeSizeInGb = 20,
            },
        },
        RoleArn = myRole.Arn,
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.sagemaker.DataQualityJobDefinition;
import com.pulumi.aws.sagemaker.DataQualityJobDefinitionArgs;
import com.pulumi.aws.sagemaker.inputs.DataQualityJobDefinitionDataQualityAppSpecificationArgs;
import com.pulumi.aws.sagemaker.inputs.DataQualityJobDefinitionDataQualityJobInputArgs;
import com.pulumi.aws.sagemaker.inputs.DataQualityJobDefinitionDataQualityJobInputEndpointInputArgs;
import com.pulumi.aws.sagemaker.inputs.DataQualityJobDefinitionDataQualityJobOutputConfigArgs;
import com.pulumi.aws.sagemaker.inputs.DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsArgs;
import com.pulumi.aws.sagemaker.inputs.DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3OutputArgs;
import com.pulumi.aws.sagemaker.inputs.DataQualityJobDefinitionJobResourcesArgs;
import com.pulumi.aws.sagemaker.inputs.DataQualityJobDefinitionJobResourcesClusterConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var test = new DataQualityJobDefinition("test", DataQualityJobDefinitionArgs.builder()
            .name("my-data-quality-job-definition")
            .dataQualityAppSpecification(DataQualityJobDefinitionDataQualityAppSpecificationArgs.builder()
                .imageUri(monitor.registryPath())
                .build())
            .dataQualityJobInput(DataQualityJobDefinitionDataQualityJobInputArgs.builder()
                .endpointInput(DataQualityJobDefinitionDataQualityJobInputEndpointInputArgs.builder()
                    .endpointName(myEndpoint.name())
                    .build())
                .build())
            .dataQualityJobOutputConfig(DataQualityJobDefinitionDataQualityJobOutputConfigArgs.builder()
                .monitoringOutputs(DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsArgs.builder()
                    .s3Output(DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3OutputArgs.builder()
                        .s3Uri(String.format("https://%s/output", myBucket.bucketRegionalDomainName()))
                        .build())
                    .build())
                .build())
            .jobResources(DataQualityJobDefinitionJobResourcesArgs.builder()
                .clusterConfig(DataQualityJobDefinitionJobResourcesClusterConfigArgs.builder()
                    .instanceCount(1)
                    .instanceType("ml.t3.medium")
                    .volumeSizeInGb(20)
                    .build())
                .build())
            .roleArn(myRole.arn())
            .build());
    }
}
resources:
  test:
    type: aws:sagemaker:DataQualityJobDefinition
    properties:
      name: my-data-quality-job-definition
      dataQualityAppSpecification:
        imageUri: ${monitor.registryPath}
      dataQualityJobInput:
        endpointInput:
          endpointName: ${myEndpoint.name}
      dataQualityJobOutputConfig:
        monitoringOutputs:
          s3Output:
            s3Uri: https://${myBucket.bucketRegionalDomainName}/output
      jobResources:
        clusterConfig:
          instanceCount: 1
          instanceType: ml.t3.medium
          volumeSizeInGb: 20
      roleArn: ${myRole.arn}
Create DataQualityJobDefinition Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new DataQualityJobDefinition(name: string, args: DataQualityJobDefinitionArgs, opts?: CustomResourceOptions);
@overload
def DataQualityJobDefinition(resource_name: str,
                             args: DataQualityJobDefinitionArgs,
                             opts: Optional[ResourceOptions] = None)
@overload
def DataQualityJobDefinition(resource_name: str,
                             opts: Optional[ResourceOptions] = None,
                             data_quality_app_specification: Optional[DataQualityJobDefinitionDataQualityAppSpecificationArgs] = None,
                             data_quality_job_input: Optional[DataQualityJobDefinitionDataQualityJobInputArgs] = None,
                             data_quality_job_output_config: Optional[DataQualityJobDefinitionDataQualityJobOutputConfigArgs] = None,
                             job_resources: Optional[DataQualityJobDefinitionJobResourcesArgs] = None,
                             role_arn: Optional[str] = None,
                             data_quality_baseline_config: Optional[DataQualityJobDefinitionDataQualityBaselineConfigArgs] = None,
                             name: Optional[str] = None,
                             network_config: Optional[DataQualityJobDefinitionNetworkConfigArgs] = None,
                             stopping_condition: Optional[DataQualityJobDefinitionStoppingConditionArgs] = None,
                             tags: Optional[Mapping[str, str]] = None)
func NewDataQualityJobDefinition(ctx *Context, name string, args DataQualityJobDefinitionArgs, opts ...ResourceOption) (*DataQualityJobDefinition, error)
public DataQualityJobDefinition(string name, DataQualityJobDefinitionArgs args, CustomResourceOptions? opts = null)
public DataQualityJobDefinition(String name, DataQualityJobDefinitionArgs args)
public DataQualityJobDefinition(String name, DataQualityJobDefinitionArgs args, CustomResourceOptions options)
type: aws:sagemaker:DataQualityJobDefinition
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args DataQualityJobDefinitionArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args DataQualityJobDefinitionArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args DataQualityJobDefinitionArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args DataQualityJobDefinitionArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args DataQualityJobDefinitionArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var dataQualityJobDefinitionResource = new Aws.Sagemaker.DataQualityJobDefinition("dataQualityJobDefinitionResource", new()
{
    DataQualityAppSpecification = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityAppSpecificationArgs
    {
        ImageUri = "string",
        Environment = 
        {
            { "string", "string" },
        },
        PostAnalyticsProcessorSourceUri = "string",
        RecordPreprocessorSourceUri = "string",
    },
    DataQualityJobInput = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobInputArgs
    {
        BatchTransformInput = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobInputBatchTransformInputArgs
        {
            DataCapturedDestinationS3Uri = "string",
            DatasetFormat = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatArgs
            {
                Csv = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatCsvArgs
                {
                    Header = false,
                },
                Json = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatJsonArgs
                {
                    Line = false,
                },
            },
            LocalPath = "string",
            S3DataDistributionType = "string",
            S3InputMode = "string",
        },
        EndpointInput = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobInputEndpointInputArgs
        {
            EndpointName = "string",
            LocalPath = "string",
            S3DataDistributionType = "string",
            S3InputMode = "string",
        },
    },
    DataQualityJobOutputConfig = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobOutputConfigArgs
    {
        MonitoringOutputs = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsArgs
        {
            S3Output = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3OutputArgs
            {
                S3Uri = "string",
                LocalPath = "string",
                S3UploadMode = "string",
            },
        },
        KmsKeyId = "string",
    },
    JobResources = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionJobResourcesArgs
    {
        ClusterConfig = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionJobResourcesClusterConfigArgs
        {
            InstanceCount = 0,
            InstanceType = "string",
            VolumeSizeInGb = 0,
            VolumeKmsKeyId = "string",
        },
    },
    RoleArn = "string",
    DataQualityBaselineConfig = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityBaselineConfigArgs
    {
        ConstraintsResource = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityBaselineConfigConstraintsResourceArgs
        {
            S3Uri = "string",
        },
        StatisticsResource = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionDataQualityBaselineConfigStatisticsResourceArgs
        {
            S3Uri = "string",
        },
    },
    Name = "string",
    NetworkConfig = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionNetworkConfigArgs
    {
        EnableInterContainerTrafficEncryption = false,
        EnableNetworkIsolation = false,
        VpcConfig = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionNetworkConfigVpcConfigArgs
        {
            SecurityGroupIds = new[]
            {
                "string",
            },
            Subnets = new[]
            {
                "string",
            },
        },
    },
    StoppingCondition = new Aws.Sagemaker.Inputs.DataQualityJobDefinitionStoppingConditionArgs
    {
        MaxRuntimeInSeconds = 0,
    },
    Tags = 
    {
        { "string", "string" },
    },
});
example, err := sagemaker.NewDataQualityJobDefinition(ctx, "dataQualityJobDefinitionResource", &sagemaker.DataQualityJobDefinitionArgs{
	DataQualityAppSpecification: &sagemaker.DataQualityJobDefinitionDataQualityAppSpecificationArgs{
		ImageUri: pulumi.String("string"),
		Environment: pulumi.StringMap{
			"string": pulumi.String("string"),
		},
		PostAnalyticsProcessorSourceUri: pulumi.String("string"),
		RecordPreprocessorSourceUri:     pulumi.String("string"),
	},
	DataQualityJobInput: &sagemaker.DataQualityJobDefinitionDataQualityJobInputArgs{
		BatchTransformInput: &sagemaker.DataQualityJobDefinitionDataQualityJobInputBatchTransformInputArgs{
			DataCapturedDestinationS3Uri: pulumi.String("string"),
			DatasetFormat: &sagemaker.DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatArgs{
				Csv: &sagemaker.DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatCsvArgs{
					Header: pulumi.Bool(false),
				},
				Json: &sagemaker.DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatJsonArgs{
					Line: pulumi.Bool(false),
				},
			},
			LocalPath:              pulumi.String("string"),
			S3DataDistributionType: pulumi.String("string"),
			S3InputMode:            pulumi.String("string"),
		},
		EndpointInput: &sagemaker.DataQualityJobDefinitionDataQualityJobInputEndpointInputArgs{
			EndpointName:           pulumi.String("string"),
			LocalPath:              pulumi.String("string"),
			S3DataDistributionType: pulumi.String("string"),
			S3InputMode:            pulumi.String("string"),
		},
	},
	DataQualityJobOutputConfig: &sagemaker.DataQualityJobDefinitionDataQualityJobOutputConfigArgs{
		MonitoringOutputs: &sagemaker.DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsArgs{
			S3Output: sagemaker.DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3OutputArgs{
				S3Uri:        pulumi.String("string"),
				LocalPath:    pulumi.String("string"),
				S3UploadMode: pulumi.String("string"),
			},
		},
		KmsKeyId: pulumi.String("string"),
	},
	JobResources: &sagemaker.DataQualityJobDefinitionJobResourcesArgs{
		ClusterConfig: &sagemaker.DataQualityJobDefinitionJobResourcesClusterConfigArgs{
			InstanceCount:  pulumi.Int(0),
			InstanceType:   pulumi.String("string"),
			VolumeSizeInGb: pulumi.Int(0),
			VolumeKmsKeyId: pulumi.String("string"),
		},
	},
	RoleArn: pulumi.String("string"),
	DataQualityBaselineConfig: &sagemaker.DataQualityJobDefinitionDataQualityBaselineConfigArgs{
		ConstraintsResource: &sagemaker.DataQualityJobDefinitionDataQualityBaselineConfigConstraintsResourceArgs{
			S3Uri: pulumi.String("string"),
		},
		StatisticsResource: &sagemaker.DataQualityJobDefinitionDataQualityBaselineConfigStatisticsResourceArgs{
			S3Uri: pulumi.String("string"),
		},
	},
	Name: pulumi.String("string"),
	NetworkConfig: &sagemaker.DataQualityJobDefinitionNetworkConfigArgs{
		EnableInterContainerTrafficEncryption: pulumi.Bool(false),
		EnableNetworkIsolation:                pulumi.Bool(false),
		VpcConfig: &sagemaker.DataQualityJobDefinitionNetworkConfigVpcConfigArgs{
			SecurityGroupIds: pulumi.StringArray{
				pulumi.String("string"),
			},
			Subnets: pulumi.StringArray{
				pulumi.String("string"),
			},
		},
	},
	StoppingCondition: &sagemaker.DataQualityJobDefinitionStoppingConditionArgs{
		MaxRuntimeInSeconds: pulumi.Int(0),
	},
	Tags: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
})
var dataQualityJobDefinitionResource = new DataQualityJobDefinition("dataQualityJobDefinitionResource", DataQualityJobDefinitionArgs.builder()
    .dataQualityAppSpecification(DataQualityJobDefinitionDataQualityAppSpecificationArgs.builder()
        .imageUri("string")
        .environment(Map.of("string", "string"))
        .postAnalyticsProcessorSourceUri("string")
        .recordPreprocessorSourceUri("string")
        .build())
    .dataQualityJobInput(DataQualityJobDefinitionDataQualityJobInputArgs.builder()
        .batchTransformInput(DataQualityJobDefinitionDataQualityJobInputBatchTransformInputArgs.builder()
            .dataCapturedDestinationS3Uri("string")
            .datasetFormat(DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatArgs.builder()
                .csv(DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatCsvArgs.builder()
                    .header(false)
                    .build())
                .json(DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatJsonArgs.builder()
                    .line(false)
                    .build())
                .build())
            .localPath("string")
            .s3DataDistributionType("string")
            .s3InputMode("string")
            .build())
        .endpointInput(DataQualityJobDefinitionDataQualityJobInputEndpointInputArgs.builder()
            .endpointName("string")
            .localPath("string")
            .s3DataDistributionType("string")
            .s3InputMode("string")
            .build())
        .build())
    .dataQualityJobOutputConfig(DataQualityJobDefinitionDataQualityJobOutputConfigArgs.builder()
        .monitoringOutputs(DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsArgs.builder()
            .s3Output(DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3OutputArgs.builder()
                .s3Uri("string")
                .localPath("string")
                .s3UploadMode("string")
                .build())
            .build())
        .kmsKeyId("string")
        .build())
    .jobResources(DataQualityJobDefinitionJobResourcesArgs.builder()
        .clusterConfig(DataQualityJobDefinitionJobResourcesClusterConfigArgs.builder()
            .instanceCount(0)
            .instanceType("string")
            .volumeSizeInGb(0)
            .volumeKmsKeyId("string")
            .build())
        .build())
    .roleArn("string")
    .dataQualityBaselineConfig(DataQualityJobDefinitionDataQualityBaselineConfigArgs.builder()
        .constraintsResource(DataQualityJobDefinitionDataQualityBaselineConfigConstraintsResourceArgs.builder()
            .s3Uri("string")
            .build())
        .statisticsResource(DataQualityJobDefinitionDataQualityBaselineConfigStatisticsResourceArgs.builder()
            .s3Uri("string")
            .build())
        .build())
    .name("string")
    .networkConfig(DataQualityJobDefinitionNetworkConfigArgs.builder()
        .enableInterContainerTrafficEncryption(false)
        .enableNetworkIsolation(false)
        .vpcConfig(DataQualityJobDefinitionNetworkConfigVpcConfigArgs.builder()
            .securityGroupIds("string")
            .subnets("string")
            .build())
        .build())
    .stoppingCondition(DataQualityJobDefinitionStoppingConditionArgs.builder()
        .maxRuntimeInSeconds(0)
        .build())
    .tags(Map.of("string", "string"))
    .build());
data_quality_job_definition_resource = aws.sagemaker.DataQualityJobDefinition("dataQualityJobDefinitionResource",
    data_quality_app_specification={
        "image_uri": "string",
        "environment": {
            "string": "string",
        },
        "post_analytics_processor_source_uri": "string",
        "record_preprocessor_source_uri": "string",
    },
    data_quality_job_input={
        "batch_transform_input": {
            "data_captured_destination_s3_uri": "string",
            "dataset_format": {
                "csv": {
                    "header": False,
                },
                "json": {
                    "line": False,
                },
            },
            "local_path": "string",
            "s3_data_distribution_type": "string",
            "s3_input_mode": "string",
        },
        "endpoint_input": {
            "endpoint_name": "string",
            "local_path": "string",
            "s3_data_distribution_type": "string",
            "s3_input_mode": "string",
        },
    },
    data_quality_job_output_config={
        "monitoring_outputs": {
            "s3_output": {
                "s3_uri": "string",
                "local_path": "string",
                "s3_upload_mode": "string",
            },
        },
        "kms_key_id": "string",
    },
    job_resources={
        "cluster_config": {
            "instance_count": 0,
            "instance_type": "string",
            "volume_size_in_gb": 0,
            "volume_kms_key_id": "string",
        },
    },
    role_arn="string",
    data_quality_baseline_config={
        "constraints_resource": {
            "s3_uri": "string",
        },
        "statistics_resource": {
            "s3_uri": "string",
        },
    },
    name="string",
    network_config={
        "enable_inter_container_traffic_encryption": False,
        "enable_network_isolation": False,
        "vpc_config": {
            "security_group_ids": ["string"],
            "subnets": ["string"],
        },
    },
    stopping_condition={
        "max_runtime_in_seconds": 0,
    },
    tags={
        "string": "string",
    })
const dataQualityJobDefinitionResource = new aws.sagemaker.DataQualityJobDefinition("dataQualityJobDefinitionResource", {
    dataQualityAppSpecification: {
        imageUri: "string",
        environment: {
            string: "string",
        },
        postAnalyticsProcessorSourceUri: "string",
        recordPreprocessorSourceUri: "string",
    },
    dataQualityJobInput: {
        batchTransformInput: {
            dataCapturedDestinationS3Uri: "string",
            datasetFormat: {
                csv: {
                    header: false,
                },
                json: {
                    line: false,
                },
            },
            localPath: "string",
            s3DataDistributionType: "string",
            s3InputMode: "string",
        },
        endpointInput: {
            endpointName: "string",
            localPath: "string",
            s3DataDistributionType: "string",
            s3InputMode: "string",
        },
    },
    dataQualityJobOutputConfig: {
        monitoringOutputs: {
            s3Output: {
                s3Uri: "string",
                localPath: "string",
                s3UploadMode: "string",
            },
        },
        kmsKeyId: "string",
    },
    jobResources: {
        clusterConfig: {
            instanceCount: 0,
            instanceType: "string",
            volumeSizeInGb: 0,
            volumeKmsKeyId: "string",
        },
    },
    roleArn: "string",
    dataQualityBaselineConfig: {
        constraintsResource: {
            s3Uri: "string",
        },
        statisticsResource: {
            s3Uri: "string",
        },
    },
    name: "string",
    networkConfig: {
        enableInterContainerTrafficEncryption: false,
        enableNetworkIsolation: false,
        vpcConfig: {
            securityGroupIds: ["string"],
            subnets: ["string"],
        },
    },
    stoppingCondition: {
        maxRuntimeInSeconds: 0,
    },
    tags: {
        string: "string",
    },
});
type: aws:sagemaker:DataQualityJobDefinition
properties:
    dataQualityAppSpecification:
        environment:
            string: string
        imageUri: string
        postAnalyticsProcessorSourceUri: string
        recordPreprocessorSourceUri: string
    dataQualityBaselineConfig:
        constraintsResource:
            s3Uri: string
        statisticsResource:
            s3Uri: string
    dataQualityJobInput:
        batchTransformInput:
            dataCapturedDestinationS3Uri: string
            datasetFormat:
                csv:
                    header: false
                json:
                    line: false
            localPath: string
            s3DataDistributionType: string
            s3InputMode: string
        endpointInput:
            endpointName: string
            localPath: string
            s3DataDistributionType: string
            s3InputMode: string
    dataQualityJobOutputConfig:
        kmsKeyId: string
        monitoringOutputs:
            s3Output:
                localPath: string
                s3UploadMode: string
                s3Uri: string
    jobResources:
        clusterConfig:
            instanceCount: 0
            instanceType: string
            volumeKmsKeyId: string
            volumeSizeInGb: 0
    name: string
    networkConfig:
        enableInterContainerTrafficEncryption: false
        enableNetworkIsolation: false
        vpcConfig:
            securityGroupIds:
                - string
            subnets:
                - string
    roleArn: string
    stoppingCondition:
        maxRuntimeInSeconds: 0
    tags:
        string: string
DataQualityJobDefinition Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The DataQualityJobDefinition resource accepts the following input properties:
- DataQualityAppSpecification DataQualityJobDefinitionDataQualityAppSpecification
- Specifies the container that runs the monitoring job. Fields are documented below.
- DataQualityJobInput DataQualityJobDefinitionDataQualityJobInput
- A list of inputs for the monitoring job. Fields are documented below.
- DataQualityJobOutputConfig DataQualityJobDefinitionDataQualityJobOutputConfig
- The output configuration for monitoring jobs. Fields are documented below.
- JobResources DataQualityJobDefinitionJobResources
- Identifies the resources to deploy for a monitoring job. Fields are documented below.
- RoleArn string
- The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.
- DataQualityBaselineConfig DataQualityJobDefinitionDataQualityBaselineConfig
- Configures the constraints and baselines for the monitoring job. Fields are documented below.
- Name string
- The name of the data quality job definition. If omitted, the provider will assign a random, unique name.
- NetworkConfig DataQualityJobDefinitionNetworkConfig
- Specifies networking configuration for the monitoring job. Fields are documented below.
- StoppingCondition DataQualityJobDefinitionStoppingCondition
- A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.
- Tags Dictionary<string, string>
- A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- DataQualityAppSpecification DataQualityJobDefinitionDataQualityAppSpecificationArgs
- Specifies the container that runs the monitoring job. Fields are documented below.
- DataQualityJobInput DataQualityJobDefinitionDataQualityJobInputArgs
- A list of inputs for the monitoring job. Fields are documented below.
- DataQualityJobOutputConfig DataQualityJobDefinitionDataQualityJobOutputConfigArgs
- The output configuration for monitoring jobs. Fields are documented below.
- JobResources DataQualityJobDefinitionJobResourcesArgs
- Identifies the resources to deploy for a monitoring job. Fields are documented below.
- RoleArn string
- The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.
- DataQualityBaselineConfig DataQualityJobDefinitionDataQualityBaselineConfigArgs
- Configures the constraints and baselines for the monitoring job. Fields are documented below.
- Name string
- The name of the data quality job definition. If omitted, the provider will assign a random, unique name.
- NetworkConfig DataQualityJobDefinitionNetworkConfigArgs
- Specifies networking configuration for the monitoring job. Fields are documented below.
- StoppingCondition DataQualityJobDefinitionStoppingConditionArgs
- A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.
- Tags map[string]string
- A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- dataQualityAppSpecification DataQualityJobDefinitionDataQualityAppSpecification
- Specifies the container that runs the monitoring job. Fields are documented below.
- dataQualityJobInput DataQualityJobDefinitionDataQualityJobInput
- A list of inputs for the monitoring job. Fields are documented below.
- dataQualityJobOutputConfig DataQualityJobDefinitionDataQualityJobOutputConfig
- The output configuration for monitoring jobs. Fields are documented below.
- jobResources DataQualityJobDefinitionJobResources
- Identifies the resources to deploy for a monitoring job. Fields are documented below.
- roleArn String
- The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.
- dataQualityBaselineConfig DataQualityJobDefinitionDataQualityBaselineConfig
- Configures the constraints and baselines for the monitoring job. Fields are documented below.
- name String
- The name of the data quality job definition. If omitted, the provider will assign a random, unique name.
- networkConfig DataQualityJobDefinitionNetworkConfig
- Specifies networking configuration for the monitoring job. Fields are documented below.
- stoppingCondition DataQualityJobDefinitionStoppingCondition
- A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.
- tags Map<String,String>
- A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- dataQualityAppSpecification DataQualityJobDefinitionDataQualityAppSpecification
- Specifies the container that runs the monitoring job. Fields are documented below.
- dataQualityJobInput DataQualityJobDefinitionDataQualityJobInput
- A list of inputs for the monitoring job. Fields are documented below.
- dataQualityJobOutputConfig DataQualityJobDefinitionDataQualityJobOutputConfig
- The output configuration for monitoring jobs. Fields are documented below.
- jobResources DataQualityJobDefinitionJobResources
- Identifies the resources to deploy for a monitoring job. Fields are documented below.
- roleArn string
- The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.
- dataQualityBaselineConfig DataQualityJobDefinitionDataQualityBaselineConfig
- Configures the constraints and baselines for the monitoring job. Fields are documented below.
- name string
- The name of the data quality job definition. If omitted, the provider will assign a random, unique name.
- networkConfig DataQualityJobDefinitionNetworkConfig
- Specifies networking configuration for the monitoring job. Fields are documented below.
- stoppingCondition DataQualityJobDefinitionStoppingCondition
- A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.
- tags {[key: string]: string}
- A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- data_quality_app_specification DataQualityJobDefinitionDataQualityAppSpecificationArgs
- Specifies the container that runs the monitoring job. Fields are documented below.
- data_quality_job_input DataQualityJobDefinitionDataQualityJobInputArgs
- A list of inputs for the monitoring job. Fields are documented below.
- data_quality_job_output_config DataQualityJobDefinitionDataQualityJobOutputConfigArgs
- The output configuration for monitoring jobs. Fields are documented below.
- job_resources DataQualityJobDefinitionJobResourcesArgs
- Identifies the resources to deploy for a monitoring job. Fields are documented below.
- role_arn str
- The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.
- data_quality_baseline_config DataQualityJobDefinitionDataQualityBaselineConfigArgs
- Configures the constraints and baselines for the monitoring job. Fields are documented below.
- name str
- The name of the data quality job definition. If omitted, the provider will assign a random, unique name.
- network_config DataQualityJobDefinitionNetworkConfigArgs
- Specifies networking configuration for the monitoring job. Fields are documented below.
- stopping_condition DataQualityJobDefinitionStoppingConditionArgs
- A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.
- tags Mapping[str, str]
- A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- dataQualityAppSpecification Property Map
- Specifies the container that runs the monitoring job. Fields are documented below.
- dataQualityJobInput Property Map
- A list of inputs for the monitoring job. Fields are documented below.
- dataQualityJobOutputConfig Property Map
- The output configuration for monitoring jobs. Fields are documented below.
- jobResources Property Map
- Identifies the resources to deploy for a monitoring job. Fields are documented below.
- roleArn String
- The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.
- dataQualityBaselineConfig Property Map
- Configures the constraints and baselines for the monitoring job. Fields are documented below.
- name String
- The name of the data quality job definition. If omitted, the provider will assign a random, unique name.
- networkConfig Property Map
- Specifies networking configuration for the monitoring job. Fields are documented below.
- stoppingCondition Property Map
- A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.
- tags Map<String>
- A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
Outputs
All input properties are implicitly available as output properties. Additionally, the DataQualityJobDefinition resource produces the following output properties:
- Arn string
- The Amazon Resource Name (ARN) assigned by AWS to this data quality job definition.
- Id string
- The provider-assigned unique ID for this managed resource.
- TagsAll Dictionary<string, string>
- A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- arn string
- The Amazon Resource Name (ARN) assigned by AWS to this data quality job definition.
- id string
- The provider-assigned unique ID for this managed resource.
- tagsAll {[key: string]: string}
- A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
Look up Existing DataQualityJobDefinition Resource
Get an existing DataQualityJobDefinition resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: DataQualityJobDefinitionState, opts?: CustomResourceOptions): DataQualityJobDefinition
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        arn: Optional[str] = None,
        data_quality_app_specification: Optional[DataQualityJobDefinitionDataQualityAppSpecificationArgs] = None,
        data_quality_baseline_config: Optional[DataQualityJobDefinitionDataQualityBaselineConfigArgs] = None,
        data_quality_job_input: Optional[DataQualityJobDefinitionDataQualityJobInputArgs] = None,
        data_quality_job_output_config: Optional[DataQualityJobDefinitionDataQualityJobOutputConfigArgs] = None,
        job_resources: Optional[DataQualityJobDefinitionJobResourcesArgs] = None,
        name: Optional[str] = None,
        network_config: Optional[DataQualityJobDefinitionNetworkConfigArgs] = None,
        role_arn: Optional[str] = None,
        stopping_condition: Optional[DataQualityJobDefinitionStoppingConditionArgs] = None,
        tags: Optional[Mapping[str, str]] = None,
        tags_all: Optional[Mapping[str, str]] = None) -> DataQualityJobDefinition
func GetDataQualityJobDefinition(ctx *Context, name string, id IDInput, state *DataQualityJobDefinitionState, opts ...ResourceOption) (*DataQualityJobDefinition, error)
public static DataQualityJobDefinition Get(string name, Input<string> id, DataQualityJobDefinitionState? state, CustomResourceOptions? opts = null)
public static DataQualityJobDefinition get(String name, Output<String> id, DataQualityJobDefinitionState state, CustomResourceOptions options)
resources:
  _:
    type: aws:sagemaker:DataQualityJobDefinition
    get:
      id: ${id}
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- Arn string
- The Amazon Resource Name (ARN) assigned by AWS to this data quality job definition.
- DataQualityAppSpecification DataQualityJobDefinitionDataQualityAppSpecification
- Specifies the container that runs the monitoring job. Fields are documented below.
- DataQualityBaselineConfig DataQualityJobDefinitionDataQualityBaselineConfig
- Configures the constraints and baselines for the monitoring job. Fields are documented below.
- DataQualityJobInput DataQualityJobDefinitionDataQualityJobInput
- A list of inputs for the monitoring job. Fields are documented below.
- DataQualityJobOutputConfig DataQualityJobDefinitionDataQualityJobOutputConfig
- The output configuration for monitoring jobs. Fields are documented below.
- JobResources DataQualityJobDefinitionJobResources
- Identifies the resources to deploy for a monitoring job. Fields are documented below.
- Name string
- The name of the data quality job definition. If omitted, the provider will assign a random, unique name.
- NetworkConfig DataQualityJobDefinitionNetworkConfig
- Specifies networking configuration for the monitoring job. Fields are documented below.
- RoleArn string
- The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.
- StoppingCondition DataQualityJobDefinitionStoppingCondition
- A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.
- Tags Dictionary<string, string>
- A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- TagsAll Dictionary<string, string>
- A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- Arn string
- The Amazon Resource Name (ARN) assigned by AWS to this data quality job definition.
- DataQualityAppSpecification DataQualityJobDefinitionDataQualityAppSpecificationArgs
- Specifies the container that runs the monitoring job. Fields are documented below.
- DataQualityBaselineConfig DataQualityJobDefinitionDataQualityBaselineConfigArgs
- Configures the constraints and baselines for the monitoring job. Fields are documented below.
- DataQualityJobInput DataQualityJobDefinitionDataQualityJobInputArgs
- A list of inputs for the monitoring job. Fields are documented below.
- DataQualityJobOutputConfig DataQualityJobDefinitionDataQualityJobOutputConfigArgs
- The output configuration for monitoring jobs. Fields are documented below.
- JobResources DataQualityJobDefinitionJobResourcesArgs
- Identifies the resources to deploy for a monitoring job. Fields are documented below.
- Name string
- The name of the data quality job definition. If omitted, the provider will assign a random, unique name.
- NetworkConfig DataQualityJobDefinitionNetworkConfigArgs
- Specifies networking configuration for the monitoring job. Fields are documented below.
- RoleArn string
- The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.
- StoppingCondition DataQualityJobDefinitionStoppingConditionArgs
- A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.
- Tags map[string]string
- A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- TagsAll map[string]string
- A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- arn String
- The Amazon Resource Name (ARN) assigned by AWS to this data quality job definition.
- dataQualityAppSpecification DataQualityJobDefinitionDataQualityAppSpecification
- Specifies the container that runs the monitoring job. Fields are documented below.
- dataQualityBaselineConfig DataQualityJobDefinitionDataQualityBaselineConfig
- Configures the constraints and baselines for the monitoring job. Fields are documented below.
- dataQualityJobInput DataQualityJobDefinitionDataQualityJobInput
- A list of inputs for the monitoring job. Fields are documented below.
- dataQualityJobOutputConfig DataQualityJobDefinitionDataQualityJobOutputConfig
- The output configuration for monitoring jobs. Fields are documented below.
- jobResources DataQualityJobDefinitionJobResources
- Identifies the resources to deploy for a monitoring job. Fields are documented below.
- name String
- The name of the data quality job definition. If omitted, the provider will assign a random, unique name.
- networkConfig DataQualityJobDefinitionNetworkConfig
- Specifies networking configuration for the monitoring job. Fields are documented below.
- roleArn String
- The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.
- stoppingCondition DataQualityJobDefinitionStoppingCondition
- A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.
- tags Map<String,String>
- A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- tagsAll Map<String,String>
- A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- arn string
- The Amazon Resource Name (ARN) assigned by AWS to this data quality job definition.
- dataQualityAppSpecification DataQualityJobDefinitionDataQualityAppSpecification
- Specifies the container that runs the monitoring job. Fields are documented below.
- dataQualityBaselineConfig DataQualityJobDefinitionDataQualityBaselineConfig
- Configures the constraints and baselines for the monitoring job. Fields are documented below.
- dataQualityJobInput DataQualityJobDefinitionDataQualityJobInput
- A list of inputs for the monitoring job. Fields are documented below.
- dataQualityJobOutputConfig DataQualityJobDefinitionDataQualityJobOutputConfig
- The output configuration for monitoring jobs. Fields are documented below.
- jobResources DataQualityJobDefinitionJobResources
- Identifies the resources to deploy for a monitoring job. Fields are documented below.
- name string
- The name of the data quality job definition. If omitted, the provider will assign a random, unique name.
- networkConfig DataQualityJobDefinitionNetworkConfig
- Specifies networking configuration for the monitoring job. Fields are documented below.
- roleArn string
- The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.
- stoppingCondition DataQualityJobDefinitionStoppingCondition
- A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.
- tags {[key: string]: string}
- A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- tagsAll {[key: string]: string}
- A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- arn str
- The Amazon Resource Name (ARN) assigned by AWS to this data quality job definition.
- data_quality_app_specification DataQualityJobDefinitionDataQualityAppSpecificationArgs
- Specifies the container that runs the monitoring job. Fields are documented below.
- data_quality_baseline_config DataQualityJobDefinitionDataQualityBaselineConfigArgs
- Configures the constraints and baselines for the monitoring job. Fields are documented below.
- data_quality_job_input DataQualityJobDefinitionDataQualityJobInputArgs
- A list of inputs for the monitoring job. Fields are documented below.
- data_quality_job_output_config DataQualityJobDefinitionDataQualityJobOutputConfigArgs
- The output configuration for monitoring jobs. Fields are documented below.
- job_resources DataQualityJobDefinitionJobResourcesArgs
- Identifies the resources to deploy for a monitoring job. Fields are documented below.
- name str
- The name of the data quality job definition. If omitted, the provider will assign a random, unique name.
- network_config DataQualityJobDefinitionNetworkConfigArgs
- Specifies networking configuration for the monitoring job. Fields are documented below.
- role_arn str
- The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.
- stopping_condition DataQualityJobDefinitionStoppingConditionArgs
- A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.
- tags Mapping[str, str]
- A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- tags_all Mapping[str, str]
- A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- arn String
- The Amazon Resource Name (ARN) assigned by AWS to this data quality job definition.
- dataQualityAppSpecification Property Map
- Specifies the container that runs the monitoring job. Fields are documented below.
- dataQualityBaselineConfig Property Map
- Configures the constraints and baselines for the monitoring job. Fields are documented below.
- dataQualityJobInput Property Map
- A list of inputs for the monitoring job. Fields are documented below.
- dataQualityJobOutputConfig Property Map
- The output configuration for monitoring jobs. Fields are documented below.
- jobResources Property Map
- Identifies the resources to deploy for a monitoring job. Fields are documented below.
- name String
- The name of the data quality job definition. If omitted, the provider will assign a random, unique name.
- networkConfig Property Map
- Specifies networking configuration for the monitoring job. Fields are documented below.
- roleArn String
- The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.
- stoppingCondition Property Map
- A time limit for how long the monitoring job is allowed to run before stopping. Fields are documented below.
- tags Map<String>
- A mapping of tags to assign to the resource. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- tagsAll Map<String>
- A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
Supporting Types
DataQualityJobDefinitionDataQualityAppSpecification, DataQualityJobDefinitionDataQualityAppSpecificationArgs                
- ImageUri string
- The container image that the data quality monitoring job runs.
- Environment Dictionary<string, string>
- Sets the environment variables in the container that the monitoring job runs. A list of key value pairs.
- PostAnalyticsProcessorSourceUri string
- An Amazon S3 URI to a script that is called after analysis has been performed. Applicable only for the built-in (first party) containers.
- RecordPreprocessorSourceUri string
- An Amazon S3 URI to a script that is called per row prior to running analysis. It can base64 decode the payload and convert it into a flattened JSON so that the built-in container can use the converted data. Applicable only for the built-in (first party) containers.
- ImageUri string
- The container image that the data quality monitoring job runs.
- Environment map[string]string
- Sets the environment variables in the container that the monitoring job runs. A list of key value pairs.
- PostAnalyticsProcessorSourceUri string
- An Amazon S3 URI to a script that is called after analysis has been performed. Applicable only for the built-in (first party) containers.
- RecordPreprocessorSourceUri string
- An Amazon S3 URI to a script that is called per row prior to running analysis. It can base64 decode the payload and convert it into a flattened JSON so that the built-in container can use the converted data. Applicable only for the built-in (first party) containers.
- imageUri String
- The container image that the data quality monitoring job runs.
- environment Map<String,String>
- Sets the environment variables in the container that the monitoring job runs. A list of key value pairs.
- postAnalyticsProcessorSourceUri String
- An Amazon S3 URI to a script that is called after analysis has been performed. Applicable only for the built-in (first party) containers.
- recordPreprocessorSourceUri String
- An Amazon S3 URI to a script that is called per row prior to running analysis. It can base64 decode the payload and convert it into a flattened JSON so that the built-in container can use the converted data. Applicable only for the built-in (first party) containers.
- imageUri string
- The container image that the data quality monitoring job runs.
- environment {[key: string]: string}
- Sets the environment variables in the container that the monitoring job runs. A list of key value pairs.
- postAnalyticsProcessorSourceUri string
- An Amazon S3 URI to a script that is called after analysis has been performed. Applicable only for the built-in (first party) containers.
- recordPreprocessorSourceUri string
- An Amazon S3 URI to a script that is called per row prior to running analysis. It can base64 decode the payload and convert it into a flattened JSON so that the built-in container can use the converted data. Applicable only for the built-in (first party) containers.
- image_uri str
- The container image that the data quality monitoring job runs.
- environment Mapping[str, str]
- Sets the environment variables in the container that the monitoring job runs. A list of key value pairs.
- post_analytics_processor_source_uri str
- An Amazon S3 URI to a script that is called after analysis has been performed. Applicable only for the built-in (first party) containers.
- record_preprocessor_source_uri str
- An Amazon S3 URI to a script that is called per row prior to running analysis. It can base64 decode the payload and convert it into a flattened JSON so that the built-in container can use the converted data. Applicable only for the built-in (first party) containers.
- imageUri String
- The container image that the data quality monitoring job runs.
- environment Map<String>
- Sets the environment variables in the container that the monitoring job runs. A list of key value pairs.
- postAnalyticsProcessorSourceUri String
- An Amazon S3 URI to a script that is called after analysis has been performed. Applicable only for the built-in (first party) containers.
- recordPreprocessorSourceUri String
- An Amazon S3 URI to a script that is called per row prior to running analysis. It can base64 decode the payload and convert it into a flattened JSON so that the built-in container can use the converted data. Applicable only for the built-in (first party) containers.
DataQualityJobDefinitionDataQualityBaselineConfig, DataQualityJobDefinitionDataQualityBaselineConfigArgs                
- ConstraintsResource DataQualityJobDefinitionDataQualityBaselineConfigConstraintsResource
- The constraints resource for a monitoring job. Fields are documented below.
- StatisticsResource DataQualityJobDefinitionDataQualityBaselineConfigStatisticsResource
- The statistics resource for a monitoring job. Fields are documented below.
- ConstraintsResource DataQualityJobDefinitionDataQualityBaselineConfigConstraintsResource
- The constraints resource for a monitoring job. Fields are documented below.
- StatisticsResource DataQualityJobDefinitionDataQualityBaselineConfigStatisticsResource
- The statistics resource for a monitoring job. Fields are documented below.
- constraintsResource DataQualityJobDefinitionDataQualityBaselineConfigConstraintsResource
- The constraints resource for a monitoring job. Fields are documented below.
- statisticsResource DataQualityJobDefinitionDataQualityBaselineConfigStatisticsResource
- The statistics resource for a monitoring job. Fields are documented below.
- constraintsResource DataQualityJobDefinitionDataQualityBaselineConfigConstraintsResource
- The constraints resource for a monitoring job. Fields are documented below.
- statisticsResource DataQualityJobDefinitionDataQualityBaselineConfigStatisticsResource
- The statistics resource for a monitoring job. Fields are documented below.
- constraints_resource DataQualityJobDefinitionDataQualityBaselineConfigConstraintsResource
- The constraints resource for a monitoring job. Fields are documented below.
- statistics_resource DataQualityJobDefinitionDataQualityBaselineConfigStatisticsResource
- The statistics resource for a monitoring job. Fields are documented below.
- constraintsResource Property Map
- The constraints resource for a monitoring job. Fields are documented below.
- statisticsResource Property Map
- The statistics resource for a monitoring job. Fields are documented below.
DataQualityJobDefinitionDataQualityBaselineConfigConstraintsResource, DataQualityJobDefinitionDataQualityBaselineConfigConstraintsResourceArgs                    
- S3Uri string
- The Amazon S3 URI for the constraints resource.
- S3Uri string
- The Amazon S3 URI for the constraints resource.
- s3Uri String
- The Amazon S3 URI for the constraints resource.
- s3Uri string
- The Amazon S3 URI for the constraints resource.
- s3_uri str
- The Amazon S3 URI for the constraints resource.
- s3Uri String
- The Amazon S3 URI for the constraints resource.
DataQualityJobDefinitionDataQualityBaselineConfigStatisticsResource, DataQualityJobDefinitionDataQualityBaselineConfigStatisticsResourceArgs                    
- S3Uri string
- The Amazon S3 URI for the statistics resource.
- S3Uri string
- The Amazon S3 URI for the statistics resource.
- s3Uri String
- The Amazon S3 URI for the statistics resource.
- s3Uri string
- The Amazon S3 URI for the statistics resource.
- s3_uri str
- The Amazon S3 URI for the statistics resource.
- s3Uri String
- The Amazon S3 URI for the statistics resource.
DataQualityJobDefinitionDataQualityJobInput, DataQualityJobDefinitionDataQualityJobInputArgs                
- BatchTransformInput DataQualityJobDefinitionDataQualityJobInputBatchTransformInput
- Input object for the batch transform job. Fields are documented below.
- EndpointInput DataQualityJobDefinitionDataQualityJobInputEndpointInput
- Input object for the endpoint. Fields are documented below.
- BatchTransformInput DataQualityJobDefinitionDataQualityJobInputBatchTransformInput
- Input object for the batch transform job. Fields are documented below.
- EndpointInput DataQualityJobDefinitionDataQualityJobInputEndpointInput
- Input object for the endpoint. Fields are documented below.
- batchTransformInput DataQualityJobDefinitionDataQualityJobInputBatchTransformInput
- Input object for the batch transform job. Fields are documented below.
- endpointInput DataQualityJobDefinitionDataQualityJobInputEndpointInput
- Input object for the endpoint. Fields are documented below.
- batchTransformInput DataQualityJobDefinitionDataQualityJobInputBatchTransformInput
- Input object for the batch transform job. Fields are documented below.
- endpointInput DataQualityJobDefinitionDataQualityJobInputEndpointInput
- Input object for the endpoint. Fields are documented below.
- batch_transform_input DataQualityJobDefinitionDataQualityJobInputBatchTransformInput
- Input object for the batch transform job. Fields are documented below.
- endpoint_input DataQualityJobDefinitionDataQualityJobInputEndpointInput
- Input object for the endpoint. Fields are documented below.
- batchTransformInput Property Map
- Input object for the batch transform job. Fields are documented below.
- endpointInput Property Map
- Input object for the endpoint. Fields are documented below.
DataQualityJobDefinitionDataQualityJobInputBatchTransformInput, DataQualityJobDefinitionDataQualityJobInputBatchTransformInputArgs                      
- DataCapturedDestinationS3Uri string
- The Amazon S3 location being used to capture the data.
- DatasetFormat DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormat
- The dataset format for your batch transform job. Fields are documented below.
- LocalPath string
- Path to the filesystem where the batch transform data is available to the container. Defaults to /opt/ml/processing/input.
- S3DataDistributionType string
- Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key.
- S3InputMode string
- Whether Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File.
- DataCapturedDestinationS3Uri string
- The Amazon S3 location being used to capture the data.
- DatasetFormat DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormat
- The dataset format for your batch transform job. Fields are documented below.
- LocalPath string
- Path to the filesystem where the batch transform data is available to the container. Defaults to /opt/ml/processing/input.
- S3DataDistributionType string
- Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key.
- S3InputMode string
- Whether Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File.
- dataCapturedDestinationS3Uri String
- The Amazon S3 location being used to capture the data.
- datasetFormat DataQuality Job Definition Data Quality Job Input Batch Transform Input Dataset Format 
- The dataset format for your batch transform job. Fields are documented below.
- localPath String
- Path to the filesystem where the batch transform data is available to the container. Defaults to /opt/ml/processing/input.
- s3DataDistributionType String
- Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key.
- s3InputMode String
- Whether Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File.
- dataCapturedDestinationS3Uri string
- The Amazon S3 location being used to capture the data.
- datasetFormat DataQuality Job Definition Data Quality Job Input Batch Transform Input Dataset Format 
- The dataset format for your batch transform job. Fields are documented below.
- localPath string
- Path to the filesystem where the batch transform data is available to the container. Defaults to /opt/ml/processing/input.
- s3DataDistributionType string
- Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key.
- s3InputMode string
- Whether Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File.
- data_captured_destination_s3_uri str
- The Amazon S3 location being used to capture the data.
- dataset_format DataQuality Job Definition Data Quality Job Input Batch Transform Input Dataset Format 
- The dataset format for your batch transform job. Fields are documented below.
- local_path str
- Path to the filesystem where the batch transform data is available to the container. Defaults to /opt/ml/processing/input.
- s3_data_distribution_type str
- Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key.
- s3_input_mode str
- Whether Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File.
- dataCapturedDestinationS3Uri String
- The Amazon S3 location being used to capture the data.
- datasetFormat Property Map
- The dataset format for your batch transform job. Fields are documented below.
- localPath String
- Path to the filesystem where the batch transform data is available to the container. Defaults to /opt/ml/processing/input.
- s3DataDistributionType String
- Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key.
- s3InputMode String
- Whether Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File.
DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormat, DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatArgs                          
- Csv
DataQuality Job Definition Data Quality Job Input Batch Transform Input Dataset Format Csv 
- The CSV dataset used in the monitoring job. Fields are documented below.
- Json
DataQuality Job Definition Data Quality Job Input Batch Transform Input Dataset Format Json 
- The JSON dataset used in the monitoring job. Fields are documented below.
- Csv
DataQuality Job Definition Data Quality Job Input Batch Transform Input Dataset Format Csv 
- The CSV dataset used in the monitoring job. Fields are documented below.
- Json
DataQuality Job Definition Data Quality Job Input Batch Transform Input Dataset Format Json 
- The JSON dataset used in the monitoring job. Fields are documented below.
- csv
DataQuality Job Definition Data Quality Job Input Batch Transform Input Dataset Format Csv 
- The CSV dataset used in the monitoring job. Fields are documented below.
- json
DataQuality Job Definition Data Quality Job Input Batch Transform Input Dataset Format Json 
- The JSON dataset used in the monitoring job. Fields are documented below.
- csv
DataQuality Job Definition Data Quality Job Input Batch Transform Input Dataset Format Csv 
- The CSV dataset used in the monitoring job. Fields are documented below.
- json
DataQuality Job Definition Data Quality Job Input Batch Transform Input Dataset Format Json 
- The JSON dataset used in the monitoring job. Fields are documented below.
- csv
DataQuality Job Definition Data Quality Job Input Batch Transform Input Dataset Format Csv 
- The CSV dataset used in the monitoring job. Fields are documented below.
- json
DataQuality Job Definition Data Quality Job Input Batch Transform Input Dataset Format Json 
- The JSON dataset used in the monitoring job. Fields are documented below.
- csv Property Map
- The CSV dataset used in the monitoring job. Fields are documented below.
- json Property Map
- The JSON dataset used in the monitoring job. Fields are documented below.
DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatCsv, DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatCsvArgs                            
- Header bool
- Indicates if the CSV data has a header.
- Header bool
- Indicates if the CSV data has a header.
- header Boolean
- Indicates if the CSV data has a header.
- header boolean
- Indicates if the CSV data has a header.
- header bool
- Indicates if the CSV data has a header.
- header Boolean
- Indicates if the CSV data has a header.
DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatJson, DataQualityJobDefinitionDataQualityJobInputBatchTransformInputDatasetFormatJsonArgs                            
- Line bool
- Indicates if the file should be read as a json object per line.
- Line bool
- Indicates if the file should be read as a json object per line.
- line Boolean
- Indicates if the file should be read as a json object per line.
- line boolean
- Indicates if the file should be read as a json object per line.
- line bool
- Indicates if the file should be read as a json object per line.
- line Boolean
- Indicates if the file should be read as a json object per line.
DataQualityJobDefinitionDataQualityJobInputEndpointInput, DataQualityJobDefinitionDataQualityJobInputEndpointInputArgs                    
- EndpointName string
- An endpoint in customer's account which has data_capture_config enabled.
- LocalPath string
- Path to the filesystem where the endpoint data is available to the container. Defaults to /opt/ml/processing/input.
- S3DataDistributionType string
- Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key.
- S3InputMode string
- Whether Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File.
- EndpointName string
- An endpoint in customer's account which has data_capture_config enabled.
- LocalPath string
- Path to the filesystem where the endpoint data is available to the container. Defaults to /opt/ml/processing/input.
- S3DataDistributionType string
- Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key.
- S3InputMode string
- Whether Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File.
- endpointName String
- An endpoint in customer's account which has data_capture_config enabled.
- localPath String
- Path to the filesystem where the endpoint data is available to the container. Defaults to /opt/ml/processing/input.
- s3DataDistributionType String
- Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key.
- s3InputMode String
- Whether Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File.
- endpointName string
- An endpoint in customer's account which has data_capture_config enabled.
- localPath string
- Path to the filesystem where the endpoint data is available to the container. Defaults to /opt/ml/processing/input.
- s3DataDistributionType string
- Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key.
- s3InputMode string
- Whether Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File.
- endpoint_name str
- An endpoint in customer's account which has data_capture_config enabled.
- local_path str
- Path to the filesystem where the endpoint data is available to the container. Defaults to /opt/ml/processing/input.
- s3_data_distribution_type str
- Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key.
- s3_input_mode str
- Whether Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File.
- endpointName String
- An endpoint in customer's account which has data_capture_config enabled.
- localPath String
- Path to the filesystem where the endpoint data is available to the container. Defaults to /opt/ml/processing/input.
- s3DataDistributionType String
- Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key. Defaults to FullyReplicated. Valid values are FullyReplicated or ShardedByS3Key.
- s3InputMode String
- Whether Pipe or File is used as the input mode for transferring data for the monitoring job. Pipe mode is recommended for large datasets. File mode is useful for small files that fit in memory. Defaults to File. Valid values are Pipe or File.
DataQualityJobDefinitionDataQualityJobOutputConfig, DataQualityJobDefinitionDataQualityJobOutputConfigArgs                  
- MonitoringOutputs DataQuality Job Definition Data Quality Job Output Config Monitoring Outputs 
- Monitoring outputs for monitoring jobs. This is where the output of the periodic monitoring jobs is uploaded. Fields are documented below.
- KmsKeyId string
- The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the model artifacts at rest using Amazon S3 server-side encryption.
- MonitoringOutputs DataQuality Job Definition Data Quality Job Output Config Monitoring Outputs 
- Monitoring outputs for monitoring jobs. This is where the output of the periodic monitoring jobs is uploaded. Fields are documented below.
- KmsKeyId string
- The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the model artifacts at rest using Amazon S3 server-side encryption.
- monitoringOutputs DataQuality Job Definition Data Quality Job Output Config Monitoring Outputs 
- Monitoring outputs for monitoring jobs. This is where the output of the periodic monitoring jobs is uploaded. Fields are documented below.
- kmsKeyId String
- The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the model artifacts at rest using Amazon S3 server-side encryption.
- monitoringOutputs DataQuality Job Definition Data Quality Job Output Config Monitoring Outputs 
- Monitoring outputs for monitoring jobs. This is where the output of the periodic monitoring jobs is uploaded. Fields are documented below.
- kmsKeyId string
- The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the model artifacts at rest using Amazon S3 server-side encryption.
- monitoring_outputs DataQuality Job Definition Data Quality Job Output Config Monitoring Outputs 
- Monitoring outputs for monitoring jobs. This is where the output of the periodic monitoring jobs is uploaded. Fields are documented below.
- kms_key_id str
- The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the model artifacts at rest using Amazon S3 server-side encryption.
- monitoringOutputs Property Map
- Monitoring outputs for monitoring jobs. This is where the output of the periodic monitoring jobs is uploaded. Fields are documented below.
- kmsKeyId String
- The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the model artifacts at rest using Amazon S3 server-side encryption.
DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputs, DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsArgs                      
- S3Output
DataQuality Job Definition Data Quality Job Output Config Monitoring Outputs S3Output 
- The Amazon S3 storage location where the results of a monitoring job are saved. Fields are documented below.
- S3Output
DataQuality Job Definition Data Quality Job Output Config Monitoring Outputs S3Output 
- The Amazon S3 storage location where the results of a monitoring job are saved. Fields are documented below.
- s3Output
DataQuality Job Definition Data Quality Job Output Config Monitoring Outputs S3Output 
- The Amazon S3 storage location where the results of a monitoring job are saved. Fields are documented below.
- s3Output
DataQuality Job Definition Data Quality Job Output Config Monitoring Outputs S3Output 
- The Amazon S3 storage location where the results of a monitoring job are saved. Fields are documented below.
- s3_output DataQuality Job Definition Data Quality Job Output Config Monitoring Outputs S3Output 
- The Amazon S3 storage location where the results of a monitoring job are saved. Fields are documented below.
- s3Output Property Map
- The Amazon S3 storage location where the results of a monitoring job are saved. Fields are documented below.
DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3Output, DataQualityJobDefinitionDataQualityJobOutputConfigMonitoringOutputsS3OutputArgs                        
- S3Uri string
- A URI that identifies the Amazon S3 storage location where Amazon SageMaker saves the results of a monitoring job.
- LocalPath string
- The local path to the Amazon S3 storage location where Amazon SageMaker saves the results of a monitoring job. LocalPath is an absolute path for the output data. Defaults to /opt/ml/processing/output.
- S3UploadMode string
- Whether to upload the results of the monitoring job continuously or after the job completes. Valid values are Continuous or EndOfJob.
- S3Uri string
- A URI that identifies the Amazon S3 storage location where Amazon SageMaker saves the results of a monitoring job.
- LocalPath string
- The local path to the Amazon S3 storage location where Amazon SageMaker saves the results of a monitoring job. LocalPath is an absolute path for the output data. Defaults to /opt/ml/processing/output.
- S3UploadMode string
- Whether to upload the results of the monitoring job continuously or after the job completes. Valid values are Continuous or EndOfJob.
- s3Uri String
- A URI that identifies the Amazon S3 storage location where Amazon SageMaker saves the results of a monitoring job.
- localPath String
- The local path to the Amazon S3 storage location where Amazon SageMaker saves the results of a monitoring job. LocalPath is an absolute path for the output data. Defaults to /opt/ml/processing/output.
- s3UploadMode String
- Whether to upload the results of the monitoring job continuously or after the job completes. Valid values are Continuous or EndOfJob.
- s3Uri string
- A URI that identifies the Amazon S3 storage location where Amazon SageMaker saves the results of a monitoring job.
- localPath string
- The local path to the Amazon S3 storage location where Amazon SageMaker saves the results of a monitoring job. LocalPath is an absolute path for the output data. Defaults to /opt/ml/processing/output.
- s3UploadMode string
- Whether to upload the results of the monitoring job continuously or after the job completes. Valid values are Continuous or EndOfJob.
- s3_uri str
- A URI that identifies the Amazon S3 storage location where Amazon SageMaker saves the results of a monitoring job.
- local_path str
- The local path to the Amazon S3 storage location where Amazon SageMaker saves the results of a monitoring job. LocalPath is an absolute path for the output data. Defaults to /opt/ml/processing/output.
- s3_upload_mode str
- Whether to upload the results of the monitoring job continuously or after the job completes. Valid values are Continuous or EndOfJob.
- s3Uri String
- A URI that identifies the Amazon S3 storage location where Amazon SageMaker saves the results of a monitoring job.
- localPath String
- The local path to the Amazon S3 storage location where Amazon SageMaker saves the results of a monitoring job. LocalPath is an absolute path for the output data. Defaults to /opt/ml/processing/output.
- s3UploadMode String
- Whether to upload the results of the monitoring job continuously or after the job completes. Valid values are Continuous or EndOfJob.
DataQualityJobDefinitionJobResources, DataQualityJobDefinitionJobResourcesArgs            
- ClusterConfig DataQuality Job Definition Job Resources Cluster Config 
- The configuration for the cluster resources used to run the processing job. Fields are documented below.
- ClusterConfig DataQuality Job Definition Job Resources Cluster Config 
- The configuration for the cluster resources used to run the processing job. Fields are documented below.
- clusterConfig DataQuality Job Definition Job Resources Cluster Config 
- The configuration for the cluster resources used to run the processing job. Fields are documented below.
- clusterConfig DataQuality Job Definition Job Resources Cluster Config 
- The configuration for the cluster resources used to run the processing job. Fields are documented below.
- cluster_config DataQuality Job Definition Job Resources Cluster Config 
- The configuration for the cluster resources used to run the processing job. Fields are documented below.
- clusterConfig Property Map
- The configuration for the cluster resources used to run the processing job. Fields are documented below.
DataQualityJobDefinitionJobResourcesClusterConfig, DataQualityJobDefinitionJobResourcesClusterConfigArgs                
- InstanceCount int
- The number of ML compute instances to use in the model monitoring job. For distributed processing jobs, specify a value greater than 1.
- InstanceType string
- The ML compute instance type for the processing job.
- VolumeSizeInGb int
- The size of the ML storage volume, in gigabytes, that you want to provision. You must specify sufficient ML storage for your scenario.
- VolumeKmsKeyId string
- The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s) that run the model monitoring job.
- InstanceCount int
- The number of ML compute instances to use in the model monitoring job. For distributed processing jobs, specify a value greater than 1.
- InstanceType string
- The ML compute instance type for the processing job.
- VolumeSizeInGb int
- The size of the ML storage volume, in gigabytes, that you want to provision. You must specify sufficient ML storage for your scenario.
- VolumeKmsKeyId string
- The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s) that run the model monitoring job.
- instanceCount Integer
- The number of ML compute instances to use in the model monitoring job. For distributed processing jobs, specify a value greater than 1.
- instanceType String
- The ML compute instance type for the processing job.
- volumeSizeInGb Integer
- The size of the ML storage volume, in gigabytes, that you want to provision. You must specify sufficient ML storage for your scenario.
- volumeKmsKeyId String
- The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s) that run the model monitoring job.
- instanceCount number
- The number of ML compute instances to use in the model monitoring job. For distributed processing jobs, specify a value greater than 1.
- instanceType string
- The ML compute instance type for the processing job.
- volumeSizeInGb number
- The size of the ML storage volume, in gigabytes, that you want to provision. You must specify sufficient ML storage for your scenario.
- volumeKmsKeyId string
- The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s) that run the model monitoring job.
- instance_count int
- The number of ML compute instances to use in the model monitoring job. For distributed processing jobs, specify a value greater than 1.
- instance_type str
- The ML compute instance type for the processing job.
- volume_size_in_gb int
- The size of the ML storage volume, in gigabytes, that you want to provision. You must specify sufficient ML storage for your scenario.
- volume_kms_key_id str
- The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s) that run the model monitoring job.
- instanceCount Number
- The number of ML compute instances to use in the model monitoring job. For distributed processing jobs, specify a value greater than 1.
- instanceType String
- The ML compute instance type for the processing job.
- volumeSizeInGb Number
- The size of the ML storage volume, in gigabytes, that you want to provision. You must specify sufficient ML storage for your scenario.
- volumeKmsKeyId String
- The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s) that run the model monitoring job.
DataQualityJobDefinitionNetworkConfig, DataQualityJobDefinitionNetworkConfigArgs            
- EnableInterContainerTrafficEncryption bool
- Whether to encrypt all communications between the instances used for the monitoring jobs. Choose true to encrypt communications. Encryption provides greater security for distributed jobs, but the processing might take longer.
- EnableNetworkIsolation bool
- Whether to allow inbound and outbound network calls to and from the containers used for the monitoring job.
- VpcConfig DataQuality Job Definition Network Config Vpc Config 
- Specifies a VPC that your training jobs and hosted models have access to. Control access to and from your training and model containers by configuring the VPC. Fields are documented below.
- EnableInterContainerTrafficEncryption bool
- Whether to encrypt all communications between the instances used for the monitoring jobs. Choose true to encrypt communications. Encryption provides greater security for distributed jobs, but the processing might take longer.
- EnableNetworkIsolation bool
- Whether to allow inbound and outbound network calls to and from the containers used for the monitoring job.
- VpcConfig DataQuality Job Definition Network Config Vpc Config 
- Specifies a VPC that your training jobs and hosted models have access to. Control access to and from your training and model containers by configuring the VPC. Fields are documented below.
- enableInterContainerTrafficEncryption Boolean
- Whether to encrypt all communications between the instances used for the monitoring jobs. Choose true to encrypt communications. Encryption provides greater security for distributed jobs, but the processing might take longer.
- enableNetworkIsolation Boolean
- Whether to allow inbound and outbound network calls to and from the containers used for the monitoring job.
- vpcConfig DataQuality Job Definition Network Config Vpc Config 
- Specifies a VPC that your training jobs and hosted models have access to. Control access to and from your training and model containers by configuring the VPC. Fields are documented below.
- enableInterContainerTrafficEncryption boolean
- Whether to encrypt all communications between the instances used for the monitoring jobs. Choose true to encrypt communications. Encryption provides greater security for distributed jobs, but the processing might take longer.
- enableNetworkIsolation boolean
- Whether to allow inbound and outbound network calls to and from the containers used for the monitoring job.
- vpcConfig DataQuality Job Definition Network Config Vpc Config 
- Specifies a VPC that your training jobs and hosted models have access to. Control access to and from your training and model containers by configuring the VPC. Fields are documented below.
- enable_inter_container_traffic_encryption bool
- Whether to encrypt all communications between the instances used for the monitoring jobs. Choose true to encrypt communications. Encryption provides greater security for distributed jobs, but the processing might take longer.
- enable_network_isolation bool
- Whether to allow inbound and outbound network calls to and from the containers used for the monitoring job.
- vpc_config DataQuality Job Definition Network Config Vpc Config 
- Specifies a VPC that your training jobs and hosted models have access to. Control access to and from your training and model containers by configuring the VPC. Fields are documented below.
- enableInterContainerTrafficEncryption Boolean
- Whether to encrypt all communications between the instances used for the monitoring jobs. Choose true to encrypt communications. Encryption provides greater security for distributed jobs, but the processing might take longer.
- enableNetworkIsolation Boolean
- Whether to allow inbound and outbound network calls to and from the containers used for the monitoring job.
- vpcConfig Property Map
- Specifies a VPC that your training jobs and hosted models have access to. Control access to and from your training and model containers by configuring the VPC. Fields are documented below.
DataQualityJobDefinitionNetworkConfigVpcConfig, DataQualityJobDefinitionNetworkConfigVpcConfigArgs                
- SecurityGroupIds List<string>
- The VPC security group IDs, in the form sg-xxxxxxxx. Specify the security groups for the VPC that is specified in the subnets field.
- Subnets List<string>
- The ID of the subnets in the VPC to which you want to connect your training job or model.
- SecurityGroupIds []string
- The VPC security group IDs, in the form sg-xxxxxxxx. Specify the security groups for the VPC that is specified in the subnets field.
- Subnets []string
- The ID of the subnets in the VPC to which you want to connect your training job or model.
- securityGroupIds List<String>
- The VPC security group IDs, in the form sg-xxxxxxxx. Specify the security groups for the VPC that is specified in the subnets field.
- subnets List<String>
- The ID of the subnets in the VPC to which you want to connect your training job or model.
- securityGroupIds string[]
- The VPC security group IDs, in the form sg-xxxxxxxx. Specify the security groups for the VPC that is specified in the subnets field.
- subnets string[]
- The ID of the subnets in the VPC to which you want to connect your training job or model.
- security_group_ids Sequence[str]
- The VPC security group IDs, in the form sg-xxxxxxxx. Specify the security groups for the VPC that is specified in the subnets field.
- subnets Sequence[str]
- The ID of the subnets in the VPC to which you want to connect your training job or model.
- securityGroupIds List<String>
- The VPC security group IDs, in the form sg-xxxxxxxx. Specify the security groups for the VPC that is specified in the subnets field.
- subnets List<String>
- The ID of the subnets in the VPC to which you want to connect your training job or model.
DataQualityJobDefinitionStoppingCondition, DataQualityJobDefinitionStoppingConditionArgs            
- MaxRuntimeInSeconds int
- The maximum runtime allowed in seconds.
- MaxRuntimeInSeconds int
- The maximum runtime allowed in seconds.
- maxRuntimeInSeconds Integer
- The maximum runtime allowed in seconds.
- maxRuntimeInSeconds number
- The maximum runtime allowed in seconds.
- max_runtime_in_seconds int
- The maximum runtime allowed in seconds.
- maxRuntimeInSeconds Number
- The maximum runtime allowed in seconds.
Import
Using pulumi import, import data quality job definitions using the name. For example:
$ pulumi import aws:sagemaker/dataQualityJobDefinition:DataQualityJobDefinition test_data_quality_job_definition data-quality-job-definition-foo
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- AWS Classic pulumi/pulumi-aws
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the aws Terraform Provider.