gcp.bigquery.Dataset
Explore with Pulumi AI
Example Usage
BigQuery Dataset Basic
// BigQuery dataset with an explicit access policy: a service account is
// granted OWNER and every user in the hashicorp.com domain gets READER.
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
// Service account that will own the dataset.
const bqowner = new gcp.serviceaccount.Account("bqowner", {accountId: "bqowner"});
const dataset = new gcp.bigquery.Dataset("dataset", {
    datasetId: "example_dataset",
    friendlyName: "test",
    description: "This is a test description",
    location: "EU",
    // Tables created in this dataset default to expiring after 1 hour (3,600,000 ms).
    defaultTableExpirationMs: 3600000,
    labels: {
        env: "default",
    },
    accesses: [
        {
            // Full control for the service account created above.
            role: "OWNER",
            userByEmail: bqowner.email,
        },
        {
            // Read-only access for all users in this domain.
            role: "READER",
            domain: "hashicorp.com",
        },
    ],
});
# BigQuery dataset with an explicit access policy: a service account is
# granted OWNER and every user in the hashicorp.com domain gets READER.
import pulumi
import pulumi_gcp as gcp
# Service account that will own the dataset.
bqowner = gcp.serviceaccount.Account("bqowner", account_id="bqowner")
dataset = gcp.bigquery.Dataset("dataset",
    dataset_id="example_dataset",
    friendly_name="test",
    description="This is a test description",
    location="EU",
    # Tables created in this dataset default to expiring after 1 hour (3,600,000 ms).
    default_table_expiration_ms=3600000,
    labels={
        "env": "default",
    },
    accesses=[
        {
            # Full control for the service account created above.
            "role": "OWNER",
            "user_by_email": bqowner.email,
        },
        {
            # Read-only access for all users in this domain.
            "role": "READER",
            "domain": "hashicorp.com",
        },
    ])
// BigQuery dataset with an explicit access policy: a service account is
// granted OWNER and every user in the hashicorp.com domain gets READER.
package main
import (
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/bigquery"
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/serviceaccount"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// Service account that will own the dataset.
		bqowner, err := serviceaccount.NewAccount(ctx, "bqowner", &serviceaccount.AccountArgs{
			AccountId: pulumi.String("bqowner"),
		})
		if err != nil {
			return err
		}
		_, err = bigquery.NewDataset(ctx, "dataset", &bigquery.DatasetArgs{
			DatasetId:    pulumi.String("example_dataset"),
			FriendlyName: pulumi.String("test"),
			Description:  pulumi.String("This is a test description"),
			Location:     pulumi.String("EU"),
			// Tables created in this dataset default to expiring after 1 hour (3,600,000 ms).
			DefaultTableExpirationMs: pulumi.Int(3600000),
			Labels: pulumi.StringMap{
				"env": pulumi.String("default"),
			},
			Accesses: bigquery.DatasetAccessTypeArray{
				// Full control for the service account created above.
				&bigquery.DatasetAccessTypeArgs{
					Role:        pulumi.String("OWNER"),
					UserByEmail: bqowner.Email,
				},
				// Read-only access for all users in this domain.
				&bigquery.DatasetAccessTypeArgs{
					Role:   pulumi.String("READER"),
					Domain: pulumi.String("hashicorp.com"),
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
// BigQuery dataset with an explicit access policy: a service account is
// granted OWNER and every user in the hashicorp.com domain gets READER.
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() => 
{
    // Service account that will own the dataset.
    var bqowner = new Gcp.ServiceAccount.Account("bqowner", new()
    {
        AccountId = "bqowner",
    });
    var dataset = new Gcp.BigQuery.Dataset("dataset", new()
    {
        DatasetId = "example_dataset",
        FriendlyName = "test",
        Description = "This is a test description",
        Location = "EU",
        // Tables created in this dataset default to expiring after 1 hour (3,600,000 ms).
        DefaultTableExpirationMs = 3600000,
        Labels = 
        {
            { "env", "default" },
        },
        Accesses = new[]
        {
            // Full control for the service account created above.
            new Gcp.BigQuery.Inputs.DatasetAccessArgs
            {
                Role = "OWNER",
                UserByEmail = bqowner.Email,
            },
            // Read-only access for all users in this domain.
            new Gcp.BigQuery.Inputs.DatasetAccessArgs
            {
                Role = "READER",
                Domain = "hashicorp.com",
            },
        },
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.serviceaccount.Account;
import com.pulumi.gcp.serviceaccount.AccountArgs;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetAccessArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
// BigQuery dataset with an explicit access policy: a service account is
// granted OWNER and every user in the hashicorp.com domain gets READER.
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        // Service account that will own the dataset.
        var bqowner = new Account("bqowner", AccountArgs.builder()
            .accountId("bqowner")
            .build());
        var dataset = new Dataset("dataset", DatasetArgs.builder()
            .datasetId("example_dataset")
            .friendlyName("test")
            .description("This is a test description")
            .location("EU")
            // Tables created in this dataset default to expiring after 1 hour (3,600,000 ms).
            .defaultTableExpirationMs(3600000)
            .labels(Map.of("env", "default"))
            .accesses(            
                // Full control for the service account created above.
                DatasetAccessArgs.builder()
                    .role("OWNER")
                    .userByEmail(bqowner.email())
                    .build(),
                // Read-only access for all users in this domain.
                DatasetAccessArgs.builder()
                    .role("READER")
                    .domain("hashicorp.com")
                    .build())
            .build());
    }
}
resources:
  dataset:
    type: gcp:bigquery:Dataset
    properties:
      datasetId: example_dataset
      friendlyName: test
      description: This is a test description
      location: EU
      # Integer literal (milliseconds): tables default to expiring after
      # 1 hour. Avoid float notation (3.6e+06) for this integer property.
      defaultTableExpirationMs: 3600000
      labels:
        env: default
      accesses:
        # Full control for the service account declared below.
        - role: OWNER
          userByEmail: ${bqowner.email}
        # Read-only access for all users in this domain.
        - role: READER
          domain: hashicorp.com
  # Service account that will own the dataset.
  bqowner:
    type: gcp:serviceaccount:Account
    properties:
      accountId: bqowner
BigQuery Dataset Cmek
// BigQuery dataset encrypted with a customer-managed encryption key (CMEK)
// from Cloud KMS, applied via defaultEncryptionConfiguration.
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
// KMS key ring and key that will encrypt the dataset's tables.
const keyRing = new gcp.kms.KeyRing("key_ring", {
    name: "example-keyring",
    location: "us",
});
const cryptoKey = new gcp.kms.CryptoKey("crypto_key", {
    name: "example-key",
    keyRing: keyRing.id,
});
const dataset = new gcp.bigquery.Dataset("dataset", {
    datasetId: "example_dataset",
    friendlyName: "test",
    description: "This is a test description",
    location: "US",
    // Tables created in this dataset default to expiring after 1 hour (3,600,000 ms).
    defaultTableExpirationMs: 3600000,
    // New tables in this dataset are encrypted with the key above by default.
    defaultEncryptionConfiguration: {
        kmsKeyName: cryptoKey.id,
    },
});
# BigQuery dataset encrypted with a customer-managed encryption key (CMEK)
# from Cloud KMS, applied via default_encryption_configuration.
import pulumi
import pulumi_gcp as gcp
# KMS key ring and key that will encrypt the dataset's tables.
key_ring = gcp.kms.KeyRing("key_ring",
    name="example-keyring",
    location="us")
crypto_key = gcp.kms.CryptoKey("crypto_key",
    name="example-key",
    key_ring=key_ring.id)
dataset = gcp.bigquery.Dataset("dataset",
    dataset_id="example_dataset",
    friendly_name="test",
    description="This is a test description",
    location="US",
    # Tables created in this dataset default to expiring after 1 hour (3,600,000 ms).
    default_table_expiration_ms=3600000,
    # New tables in this dataset are encrypted with the key above by default.
    default_encryption_configuration={
        "kms_key_name": crypto_key.id,
    })
// BigQuery dataset encrypted with a customer-managed encryption key (CMEK)
// from Cloud KMS, applied via DefaultEncryptionConfiguration.
package main
import (
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/bigquery"
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/kms"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// KMS key ring and key that will encrypt the dataset's tables.
		keyRing, err := kms.NewKeyRing(ctx, "key_ring", &kms.KeyRingArgs{
			Name:     pulumi.String("example-keyring"),
			Location: pulumi.String("us"),
		})
		if err != nil {
			return err
		}
		cryptoKey, err := kms.NewCryptoKey(ctx, "crypto_key", &kms.CryptoKeyArgs{
			Name:    pulumi.String("example-key"),
			KeyRing: keyRing.ID(),
		})
		if err != nil {
			return err
		}
		_, err = bigquery.NewDataset(ctx, "dataset", &bigquery.DatasetArgs{
			DatasetId:    pulumi.String("example_dataset"),
			FriendlyName: pulumi.String("test"),
			Description:  pulumi.String("This is a test description"),
			Location:     pulumi.String("US"),
			// Tables created in this dataset default to expiring after 1 hour (3,600,000 ms).
			DefaultTableExpirationMs: pulumi.Int(3600000),
			// New tables in this dataset are encrypted with the key above by default.
			DefaultEncryptionConfiguration: &bigquery.DatasetDefaultEncryptionConfigurationArgs{
				KmsKeyName: cryptoKey.ID(),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
// BigQuery dataset encrypted with a customer-managed encryption key (CMEK)
// from Cloud KMS, applied via DefaultEncryptionConfiguration.
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() => 
{
    // KMS key ring and key that will encrypt the dataset's tables.
    var keyRing = new Gcp.Kms.KeyRing("key_ring", new()
    {
        Name = "example-keyring",
        Location = "us",
    });
    var cryptoKey = new Gcp.Kms.CryptoKey("crypto_key", new()
    {
        Name = "example-key",
        KeyRing = keyRing.Id,
    });
    var dataset = new Gcp.BigQuery.Dataset("dataset", new()
    {
        DatasetId = "example_dataset",
        FriendlyName = "test",
        Description = "This is a test description",
        Location = "US",
        // Tables created in this dataset default to expiring after 1 hour (3,600,000 ms).
        DefaultTableExpirationMs = 3600000,
        // New tables in this dataset are encrypted with the key above by default.
        DefaultEncryptionConfiguration = new Gcp.BigQuery.Inputs.DatasetDefaultEncryptionConfigurationArgs
        {
            KmsKeyName = cryptoKey.Id,
        },
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.kms.KeyRing;
import com.pulumi.gcp.kms.KeyRingArgs;
import com.pulumi.gcp.kms.CryptoKey;
import com.pulumi.gcp.kms.CryptoKeyArgs;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetDefaultEncryptionConfigurationArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
// BigQuery dataset encrypted with a customer-managed encryption key (CMEK)
// from Cloud KMS, applied via defaultEncryptionConfiguration.
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        // KMS key ring and key that will encrypt the dataset's tables.
        var keyRing = new KeyRing("keyRing", KeyRingArgs.builder()
            .name("example-keyring")
            .location("us")
            .build());
        var cryptoKey = new CryptoKey("cryptoKey", CryptoKeyArgs.builder()
            .name("example-key")
            .keyRing(keyRing.id())
            .build());
        var dataset = new Dataset("dataset", DatasetArgs.builder()
            .datasetId("example_dataset")
            .friendlyName("test")
            .description("This is a test description")
            .location("US")
            // Tables created in this dataset default to expiring after 1 hour (3,600,000 ms).
            .defaultTableExpirationMs(3600000)
            // New tables in this dataset are encrypted with the key above by default.
            .defaultEncryptionConfiguration(DatasetDefaultEncryptionConfigurationArgs.builder()
                .kmsKeyName(cryptoKey.id())
                .build())
            .build());
    }
}
resources:
  dataset:
    type: gcp:bigquery:Dataset
    properties:
      datasetId: example_dataset
      friendlyName: test
      description: This is a test description
      location: US
      # Integer literal (milliseconds): tables default to expiring after
      # 1 hour. Avoid float notation (3.6e+06) for this integer property.
      defaultTableExpirationMs: 3600000
      # New tables in this dataset are encrypted with the key below by default.
      defaultEncryptionConfiguration:
        kmsKeyName: ${cryptoKey.id}
  # KMS key ring and key that will encrypt the dataset's tables.
  cryptoKey:
    type: gcp:kms:CryptoKey
    name: crypto_key
    properties:
      name: example-key
      keyRing: ${keyRing.id}
  keyRing:
    type: gcp:kms:KeyRing
    name: key_ring
    properties:
      name: example-keyring
      location: us
BigQuery Dataset Authorized Dataset
// Two BigQuery datasets where the "private" dataset authorizes the "public"
// dataset (targetTypes VIEWS) in its access list — an authorized dataset entry.
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
// Service account that will own both datasets.
const bqowner = new gcp.serviceaccount.Account("bqowner", {accountId: "bqowner"});
const _public = new gcp.bigquery.Dataset("public", {
    datasetId: "public",
    friendlyName: "test",
    description: "This dataset is public",
    location: "EU",
    defaultTableExpirationMs: 3600000,
    labels: {
        env: "default",
    },
    accesses: [
        {
            role: "OWNER",
            userByEmail: bqowner.email,
        },
        {
            role: "READER",
            domain: "hashicorp.com",
        },
    ],
});
const dataset = new gcp.bigquery.Dataset("dataset", {
    datasetId: "private",
    friendlyName: "test",
    description: "This dataset is private",
    location: "EU",
    defaultTableExpirationMs: 3600000,
    labels: {
        env: "default",
    },
    accesses: [
        {
            role: "OWNER",
            userByEmail: bqowner.email,
        },
        {
            role: "READER",
            domain: "hashicorp.com",
        },
        {
            // Authorized-dataset entry: references the public dataset and
            // applies to its VIEWS (no role is set on this entry).
            dataset: {
                dataset: {
                    projectId: _public.project,
                    datasetId: _public.datasetId,
                },
                targetTypes: ["VIEWS"],
            },
        },
    ],
});
# Two BigQuery datasets where the "private" dataset authorizes the "public"
# dataset (target_types VIEWS) in its access list — an authorized dataset entry.
import pulumi
import pulumi_gcp as gcp
# Service account that will own both datasets.
bqowner = gcp.serviceaccount.Account("bqowner", account_id="bqowner")
public = gcp.bigquery.Dataset("public",
    dataset_id="public",
    friendly_name="test",
    description="This dataset is public",
    location="EU",
    default_table_expiration_ms=3600000,
    labels={
        "env": "default",
    },
    accesses=[
        {
            "role": "OWNER",
            "user_by_email": bqowner.email,
        },
        {
            "role": "READER",
            "domain": "hashicorp.com",
        },
    ])
dataset = gcp.bigquery.Dataset("dataset",
    dataset_id="private",
    friendly_name="test",
    description="This dataset is private",
    location="EU",
    default_table_expiration_ms=3600000,
    labels={
        "env": "default",
    },
    accesses=[
        {
            "role": "OWNER",
            "user_by_email": bqowner.email,
        },
        {
            "role": "READER",
            "domain": "hashicorp.com",
        },
        {
            # Authorized-dataset entry: references the public dataset and
            # applies to its VIEWS (no role is set on this entry).
            "dataset": {
                "dataset": {
                    "project_id": public.project,
                    "dataset_id": public.dataset_id,
                },
                "target_types": ["VIEWS"],
            },
        },
    ])
// Two BigQuery datasets where the "private" dataset authorizes the "public"
// dataset (TargetTypes VIEWS) in its access list — an authorized dataset entry.
package main
import (
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/bigquery"
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/serviceaccount"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		// Service account that will own both datasets.
		bqowner, err := serviceaccount.NewAccount(ctx, "bqowner", &serviceaccount.AccountArgs{
			AccountId: pulumi.String("bqowner"),
		})
		if err != nil {
			return err
		}
		public, err := bigquery.NewDataset(ctx, "public", &bigquery.DatasetArgs{
			DatasetId:                pulumi.String("public"),
			FriendlyName:             pulumi.String("test"),
			Description:              pulumi.String("This dataset is public"),
			Location:                 pulumi.String("EU"),
			DefaultTableExpirationMs: pulumi.Int(3600000),
			Labels: pulumi.StringMap{
				"env": pulumi.String("default"),
			},
			Accesses: bigquery.DatasetAccessTypeArray{
				&bigquery.DatasetAccessTypeArgs{
					Role:        pulumi.String("OWNER"),
					UserByEmail: bqowner.Email,
				},
				&bigquery.DatasetAccessTypeArgs{
					Role:   pulumi.String("READER"),
					Domain: pulumi.String("hashicorp.com"),
				},
			},
		})
		if err != nil {
			return err
		}
		_, err = bigquery.NewDataset(ctx, "dataset", &bigquery.DatasetArgs{
			DatasetId:                pulumi.String("private"),
			FriendlyName:             pulumi.String("test"),
			Description:              pulumi.String("This dataset is private"),
			Location:                 pulumi.String("EU"),
			DefaultTableExpirationMs: pulumi.Int(3600000),
			Labels: pulumi.StringMap{
				"env": pulumi.String("default"),
			},
			Accesses: bigquery.DatasetAccessTypeArray{
				&bigquery.DatasetAccessTypeArgs{
					Role:        pulumi.String("OWNER"),
					UserByEmail: bqowner.Email,
				},
				&bigquery.DatasetAccessTypeArgs{
					Role:   pulumi.String("READER"),
					Domain: pulumi.String("hashicorp.com"),
				},
				// Authorized-dataset entry: references the public dataset and
				// applies to its VIEWS (no Role is set on this entry).
				&bigquery.DatasetAccessTypeArgs{
					Dataset: &bigquery.DatasetAccessDatasetArgs{
						Dataset: &bigquery.DatasetAccessDatasetDatasetArgs{
							ProjectId: public.Project,
							DatasetId: public.DatasetId,
						},
						TargetTypes: pulumi.StringArray{
							pulumi.String("VIEWS"),
						},
					},
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
// Two BigQuery datasets where the "private" dataset authorizes the "public"
// dataset (TargetTypes VIEWS) in its access list — an authorized dataset entry.
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() => 
{
    // Service account that will own both datasets.
    var bqowner = new Gcp.ServiceAccount.Account("bqowner", new()
    {
        AccountId = "bqowner",
    });
    var @public = new Gcp.BigQuery.Dataset("public", new()
    {
        DatasetId = "public",
        FriendlyName = "test",
        Description = "This dataset is public",
        Location = "EU",
        DefaultTableExpirationMs = 3600000,
        Labels = 
        {
            { "env", "default" },
        },
        Accesses = new[]
        {
            new Gcp.BigQuery.Inputs.DatasetAccessArgs
            {
                Role = "OWNER",
                UserByEmail = bqowner.Email,
            },
            new Gcp.BigQuery.Inputs.DatasetAccessArgs
            {
                Role = "READER",
                Domain = "hashicorp.com",
            },
        },
    });
    var dataset = new Gcp.BigQuery.Dataset("dataset", new()
    {
        DatasetId = "private",
        FriendlyName = "test",
        Description = "This dataset is private",
        Location = "EU",
        DefaultTableExpirationMs = 3600000,
        Labels = 
        {
            { "env", "default" },
        },
        Accesses = new[]
        {
            new Gcp.BigQuery.Inputs.DatasetAccessArgs
            {
                Role = "OWNER",
                UserByEmail = bqowner.Email,
            },
            new Gcp.BigQuery.Inputs.DatasetAccessArgs
            {
                Role = "READER",
                Domain = "hashicorp.com",
            },
            // Authorized-dataset entry: references the public dataset and
            // applies to its VIEWS (no Role is set on this entry).
            new Gcp.BigQuery.Inputs.DatasetAccessArgs
            {
                Dataset = new Gcp.BigQuery.Inputs.DatasetAccessDatasetArgs
                {
                    Dataset = new Gcp.BigQuery.Inputs.DatasetAccessDatasetDatasetArgs
                    {
                        ProjectId = @public.Project,
                        DatasetId = @public.DatasetId,
                    },
                    TargetTypes = new[]
                    {
                        "VIEWS",
                    },
                },
            },
        },
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.serviceaccount.Account;
import com.pulumi.gcp.serviceaccount.AccountArgs;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetAccessArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetAccessDatasetArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetAccessDatasetDatasetArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
// Two BigQuery datasets where the "private" dataset authorizes the "public"
// dataset (targetTypes VIEWS) in its access list — an authorized dataset entry.
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        // Service account that will own both datasets.
        var bqowner = new Account("bqowner", AccountArgs.builder()
            .accountId("bqowner")
            .build());
        var public_ = new Dataset("public", DatasetArgs.builder()
            .datasetId("public")
            .friendlyName("test")
            .description("This dataset is public")
            .location("EU")
            .defaultTableExpirationMs(3600000)
            .labels(Map.of("env", "default"))
            .accesses(            
                DatasetAccessArgs.builder()
                    .role("OWNER")
                    .userByEmail(bqowner.email())
                    .build(),
                DatasetAccessArgs.builder()
                    .role("READER")
                    .domain("hashicorp.com")
                    .build())
            .build());
        var dataset = new Dataset("dataset", DatasetArgs.builder()
            .datasetId("private")
            .friendlyName("test")
            .description("This dataset is private")
            .location("EU")
            .defaultTableExpirationMs(3600000)
            .labels(Map.of("env", "default"))
            .accesses(            
                DatasetAccessArgs.builder()
                    .role("OWNER")
                    .userByEmail(bqowner.email())
                    .build(),
                DatasetAccessArgs.builder()
                    .role("READER")
                    .domain("hashicorp.com")
                    .build(),
                // Authorized-dataset entry: references the public dataset and
                // applies to its VIEWS (no role is set on this entry).
                DatasetAccessArgs.builder()
                    .dataset(DatasetAccessDatasetArgs.builder()
                        .dataset(DatasetAccessDatasetDatasetArgs.builder()
                            .projectId(public_.project())
                            .datasetId(public_.datasetId())
                            .build())
                        .targetTypes("VIEWS")
                        .build())
                    .build())
            .build());
    }
}
resources:
  public:
    type: gcp:bigquery:Dataset
    properties:
      datasetId: public
      friendlyName: test
      description: This dataset is public
      location: EU
      # Integer literal (milliseconds): tables default to expiring after
      # 1 hour. Avoid float notation (3.6e+06) for this integer property.
      defaultTableExpirationMs: 3600000
      labels:
        env: default
      accesses:
        - role: OWNER
          userByEmail: ${bqowner.email}
        - role: READER
          domain: hashicorp.com
  dataset:
    type: gcp:bigquery:Dataset
    properties:
      datasetId: private
      friendlyName: test
      description: This dataset is private
      location: EU
      defaultTableExpirationMs: 3600000
      labels:
        env: default
      accesses:
        - role: OWNER
          userByEmail: ${bqowner.email}
        - role: READER
          domain: hashicorp.com
        # Authorized-dataset entry: references the public dataset and
        # applies to its VIEWS (no role is set on this entry).
        - dataset:
            dataset:
              projectId: ${public.project}
              datasetId: ${public.datasetId}
            targetTypes:
              - VIEWS
  # Service account that will own both datasets.
  bqowner:
    type: gcp:serviceaccount:Account
    properties:
      accountId: bqowner
BigQuery Dataset Authorized Routine
// A routine in a public dataset is listed in the private dataset's access
// policy — an authorized routine entry (no role is set on that entry).
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const _public = new gcp.bigquery.Dataset("public", {
    datasetId: "public_dataset",
    description: "This dataset is public",
});
// SQL table-valued function living in the public dataset.
const publicRoutine = new gcp.bigquery.Routine("public", {
    datasetId: _public.datasetId,
    routineId: "public_routine",
    routineType: "TABLE_VALUED_FUNCTION",
    language: "SQL",
    definitionBody: "SELECT 1 + value AS value\n",
    arguments: [{
        name: "value",
        argumentKind: "FIXED_TYPE",
        // dataType/returnTableType take JSON-encoded type descriptors.
        dataType: JSON.stringify({
            typeKind: "INT64",
        }),
    }],
    returnTableType: JSON.stringify({
        columns: [{
            name: "value",
            type: {
                typeKind: "INT64",
            },
        }],
    }),
});
const _private = new gcp.bigquery.Dataset("private", {
    datasetId: "private_dataset",
    description: "This dataset is private",
    accesses: [
        {
            role: "OWNER",
            userByEmail: "my@service-account.com",
        },
        {
            // Authorized-routine entry referencing the routine above.
            routine: {
                projectId: publicRoutine.project,
                datasetId: publicRoutine.datasetId,
                routineId: publicRoutine.routineId,
            },
        },
    ],
});
# A routine in a public dataset is listed in the private dataset's access
# policy — an authorized routine entry (no role is set on that entry).
import pulumi
import json
import pulumi_gcp as gcp
public = gcp.bigquery.Dataset("public",
    dataset_id="public_dataset",
    description="This dataset is public")
# SQL table-valued function living in the public dataset.
public_routine = gcp.bigquery.Routine("public",
    dataset_id=public.dataset_id,
    routine_id="public_routine",
    routine_type="TABLE_VALUED_FUNCTION",
    language="SQL",
    definition_body="SELECT 1 + value AS value\n",
    arguments=[{
        "name": "value",
        "argument_kind": "FIXED_TYPE",
        # data_type/return_table_type take JSON-encoded type descriptors.
        "data_type": json.dumps({
            "typeKind": "INT64",
        }),
    }],
    return_table_type=json.dumps({
        "columns": [{
            "name": "value",
            "type": {
                "typeKind": "INT64",
            },
        }],
    }))
private = gcp.bigquery.Dataset("private",
    dataset_id="private_dataset",
    description="This dataset is private",
    accesses=[
        {
            "role": "OWNER",
            "user_by_email": "my@service-account.com",
        },
        {
            # Authorized-routine entry referencing the routine above.
            "routine": {
                "project_id": public_routine.project,
                "dataset_id": public_routine.dataset_id,
                "routine_id": public_routine.routine_id,
            },
        },
    ])
// A routine in a public dataset is listed in the private dataset's access
// policy — an authorized routine entry (no Role is set on that entry).
package main
import (
	"encoding/json"
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/bigquery"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		public, err := bigquery.NewDataset(ctx, "public", &bigquery.DatasetArgs{
			DatasetId:   pulumi.String("public_dataset"),
			Description: pulumi.String("This dataset is public"),
		})
		if err != nil {
			return err
		}
		// JSON-encoded type descriptor for the routine's argument (INT64).
		tmpJSON0, err := json.Marshal(map[string]interface{}{
			"typeKind": "INT64",
		})
		if err != nil {
			return err
		}
		json0 := string(tmpJSON0)
		// JSON-encoded descriptor of the table type the routine returns.
		tmpJSON1, err := json.Marshal(map[string]interface{}{
			"columns": []map[string]interface{}{
				map[string]interface{}{
					"name": "value",
					"type": map[string]interface{}{
						"typeKind": "INT64",
					},
				},
			},
		})
		if err != nil {
			return err
		}
		json1 := string(tmpJSON1)
		// SQL table-valued function living in the public dataset.
		publicRoutine, err := bigquery.NewRoutine(ctx, "public", &bigquery.RoutineArgs{
			DatasetId:      public.DatasetId,
			RoutineId:      pulumi.String("public_routine"),
			RoutineType:    pulumi.String("TABLE_VALUED_FUNCTION"),
			Language:       pulumi.String("SQL"),
			DefinitionBody: pulumi.String("SELECT 1 + value AS value\n"),
			Arguments: bigquery.RoutineArgumentArray{
				&bigquery.RoutineArgumentArgs{
					Name:         pulumi.String("value"),
					ArgumentKind: pulumi.String("FIXED_TYPE"),
					DataType:     pulumi.String(json0),
				},
			},
			ReturnTableType: pulumi.String(json1),
		})
		if err != nil {
			return err
		}
		_, err = bigquery.NewDataset(ctx, "private", &bigquery.DatasetArgs{
			DatasetId:   pulumi.String("private_dataset"),
			Description: pulumi.String("This dataset is private"),
			Accesses: bigquery.DatasetAccessTypeArray{
				&bigquery.DatasetAccessTypeArgs{
					Role:        pulumi.String("OWNER"),
					UserByEmail: pulumi.String("my@service-account.com"),
				},
				// Authorized-routine entry referencing the routine above.
				&bigquery.DatasetAccessTypeArgs{
					Routine: &bigquery.DatasetAccessRoutineArgs{
						ProjectId: publicRoutine.Project,
						DatasetId: publicRoutine.DatasetId,
						RoutineId: publicRoutine.RoutineId,
					},
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
// A routine in a public dataset is listed in the private dataset's access
// policy — an authorized routine entry (no Role is set on that entry).
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() => 
{
    var @public = new Gcp.BigQuery.Dataset("public", new()
    {
        DatasetId = "public_dataset",
        Description = "This dataset is public",
    });
    // SQL table-valued function living in the public dataset.
    var publicRoutine = new Gcp.BigQuery.Routine("public", new()
    {
        DatasetId = @public.DatasetId,
        RoutineId = "public_routine",
        RoutineType = "TABLE_VALUED_FUNCTION",
        Language = "SQL",
        DefinitionBody = @"SELECT 1 + value AS value
",
        Arguments = new[]
        {
            new Gcp.BigQuery.Inputs.RoutineArgumentArgs
            {
                Name = "value",
                ArgumentKind = "FIXED_TYPE",
                // DataType/ReturnTableType take JSON-encoded type descriptors.
                DataType = JsonSerializer.Serialize(new Dictionary<string, object?>
                {
                    ["typeKind"] = "INT64",
                }),
            },
        },
        ReturnTableType = JsonSerializer.Serialize(new Dictionary<string, object?>
        {
            ["columns"] = new[]
            {
                new Dictionary<string, object?>
                {
                    ["name"] = "value",
                    ["type"] = new Dictionary<string, object?>
                    {
                        ["typeKind"] = "INT64",
                    },
                },
            },
        }),
    });
    var @private = new Gcp.BigQuery.Dataset("private", new()
    {
        DatasetId = "private_dataset",
        Description = "This dataset is private",
        Accesses = new[]
        {
            new Gcp.BigQuery.Inputs.DatasetAccessArgs
            {
                Role = "OWNER",
                UserByEmail = "my@service-account.com",
            },
            // Authorized-routine entry referencing the routine above.
            new Gcp.BigQuery.Inputs.DatasetAccessArgs
            {
                Routine = new Gcp.BigQuery.Inputs.DatasetAccessRoutineArgs
                {
                    ProjectId = publicRoutine.Project,
                    DatasetId = publicRoutine.DatasetId,
                    RoutineId = publicRoutine.RoutineId,
                },
            },
        },
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.Routine;
import com.pulumi.gcp.bigquery.RoutineArgs;
import com.pulumi.gcp.bigquery.inputs.RoutineArgumentArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetAccessArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetAccessRoutineArgs;
import static com.pulumi.codegen.internal.Serialization.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
// A routine in a public dataset is listed in the private dataset's access
// policy — an authorized routine entry (no role is set on that entry).
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var public_ = new Dataset("public", DatasetArgs.builder()
            .datasetId("public_dataset")
            .description("This dataset is public")
            .build());
        // SQL table-valued function living in the public dataset.
        var publicRoutine = new Routine("publicRoutine", RoutineArgs.builder()
            .datasetId(public_.datasetId())
            .routineId("public_routine")
            .routineType("TABLE_VALUED_FUNCTION")
            .language("SQL")
            .definitionBody("""
SELECT 1 + value AS value
            """)
            .arguments(RoutineArgumentArgs.builder()
                .name("value")
                .argumentKind("FIXED_TYPE")
                // dataType/returnTableType take JSON-encoded type descriptors.
                .dataType(serializeJson(
                    jsonObject(
                        jsonProperty("typeKind", "INT64")
                    )))
                .build())
            .returnTableType(serializeJson(
                jsonObject(
                    jsonProperty("columns", jsonArray(jsonObject(
                        jsonProperty("name", "value"),
                        jsonProperty("type", jsonObject(
                            jsonProperty("typeKind", "INT64")
                        ))
                    )))
                )))
            .build());
        var private_ = new Dataset("private", DatasetArgs.builder()
            .datasetId("private_dataset")
            .description("This dataset is private")
            .accesses(            
                DatasetAccessArgs.builder()
                    .role("OWNER")
                    .userByEmail("my@service-account.com")
                    .build(),
                // Authorized-routine entry referencing the routine above.
                DatasetAccessArgs.builder()
                    .routine(DatasetAccessRoutineArgs.builder()
                        .projectId(publicRoutine.project())
                        .datasetId(publicRoutine.datasetId())
                        .routineId(publicRoutine.routineId())
                        .build())
                    .build())
            .build());
    }
}
resources:
  public:
    type: gcp:bigquery:Dataset
    properties:
      datasetId: public_dataset
      description: This dataset is public
  # SQL table-valued function living in the public dataset.
  publicRoutine:
    type: gcp:bigquery:Routine
    name: public
    properties:
      datasetId: ${public.datasetId}
      routineId: public_routine
      routineType: TABLE_VALUED_FUNCTION
      language: SQL
      # Trailing whitespace removed so the body matches the
      # "SELECT 1 + value AS value\n" string used by the other SDK examples.
      definitionBody: |
        SELECT 1 + value AS value
      arguments:
        - name: value
          argumentKind: FIXED_TYPE
          # dataType/returnTableType take JSON-encoded type descriptors.
          dataType:
            fn::toJSON:
              typeKind: INT64
      returnTableType:
        fn::toJSON:
          columns:
            - name: value
              type:
                typeKind: INT64
  private:
    type: gcp:bigquery:Dataset
    properties:
      datasetId: private_dataset
      description: This dataset is private
      accesses:
        - role: OWNER
          userByEmail: my@service-account.com
        # Authorized-routine entry referencing the routine above.
        - routine:
            projectId: ${publicRoutine.project}
            datasetId: ${publicRoutine.datasetId}
            routineId: ${publicRoutine.routineId}
BigQuery Dataset External Reference AWS
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const dataset = new gcp.bigquery.Dataset("dataset", {
    datasetId: "example_dataset",
    friendlyName: "test",
    description: "This is a test description",
    location: "aws-us-east-1",
    externalDatasetReference: {
        externalSource: "aws-glue://arn:aws:glue:us-east-1:999999999999:database/database",
        connection: "projects/project/locations/aws-us-east-1/connections/connection",
    },
});
import pulumi
import pulumi_gcp as gcp
dataset = gcp.bigquery.Dataset("dataset",
    dataset_id="example_dataset",
    friendly_name="test",
    description="This is a test description",
    location="aws-us-east-1",
    external_dataset_reference={
        "external_source": "aws-glue://arn:aws:glue:us-east-1:999999999999:database/database",
        "connection": "projects/project/locations/aws-us-east-1/connections/connection",
    })
package main
import (
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/bigquery"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := bigquery.NewDataset(ctx, "dataset", &bigquery.DatasetArgs{
			DatasetId:    pulumi.String("example_dataset"),
			FriendlyName: pulumi.String("test"),
			Description:  pulumi.String("This is a test description"),
			Location:     pulumi.String("aws-us-east-1"),
			ExternalDatasetReference: &bigquery.DatasetExternalDatasetReferenceArgs{
				ExternalSource: pulumi.String("aws-glue://arn:aws:glue:us-east-1:999999999999:database/database"),
				Connection:     pulumi.String("projects/project/locations/aws-us-east-1/connections/connection"),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() => 
{
    var dataset = new Gcp.BigQuery.Dataset("dataset", new()
    {
        DatasetId = "example_dataset",
        FriendlyName = "test",
        Description = "This is a test description",
        Location = "aws-us-east-1",
        ExternalDatasetReference = new Gcp.BigQuery.Inputs.DatasetExternalDatasetReferenceArgs
        {
            ExternalSource = "aws-glue://arn:aws:glue:us-east-1:999999999999:database/database",
            Connection = "projects/project/locations/aws-us-east-1/connections/connection",
        },
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetExternalDatasetReferenceArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var dataset = new Dataset("dataset", DatasetArgs.builder()
            .datasetId("example_dataset")
            .friendlyName("test")
            .description("This is a test description")
            .location("aws-us-east-1")
            .externalDatasetReference(DatasetExternalDatasetReferenceArgs.builder()
                .externalSource("aws-glue://arn:aws:glue:us-east-1:999999999999:database/database")
                .connection("projects/project/locations/aws-us-east-1/connections/connection")
                .build())
            .build());
    }
}
resources:
  dataset:
    type: gcp:bigquery:Dataset
    properties:
      datasetId: example_dataset
      friendlyName: test
      description: This is a test description
      location: aws-us-east-1
      externalDatasetReference:
        externalSource: aws-glue://arn:aws:glue:us-east-1:999999999999:database/database
        connection: projects/project/locations/aws-us-east-1/connections/connection
Bigquery Dataset External Catalog Dataset Options
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const dataset = new gcp.bigquery.Dataset("dataset", {
    datasetId: "example_dataset",
    friendlyName: "test",
    description: "This is a test description",
    location: "US",
    externalCatalogDatasetOptions: {
        parameters: {
            dataset_owner: "test_dataset_owner",
        },
        defaultStorageLocationUri: "gs://test_dataset/tables",
    },
});
import pulumi
import pulumi_gcp as gcp
dataset = gcp.bigquery.Dataset("dataset",
    dataset_id="example_dataset",
    friendly_name="test",
    description="This is a test description",
    location="US",
    external_catalog_dataset_options={
        "parameters": {
            "dataset_owner": "test_dataset_owner",
        },
        "default_storage_location_uri": "gs://test_dataset/tables",
    })
package main
import (
	"github.com/pulumi/pulumi-gcp/sdk/v8/go/gcp/bigquery"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := bigquery.NewDataset(ctx, "dataset", &bigquery.DatasetArgs{
			DatasetId:    pulumi.String("example_dataset"),
			FriendlyName: pulumi.String("test"),
			Description:  pulumi.String("This is a test description"),
			Location:     pulumi.String("US"),
			ExternalCatalogDatasetOptions: &bigquery.DatasetExternalCatalogDatasetOptionsArgs{
				Parameters: pulumi.StringMap{
					"dataset_owner": pulumi.String("test_dataset_owner"),
				},
				DefaultStorageLocationUri: pulumi.String("gs://test_dataset/tables"),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() => 
{
    var dataset = new Gcp.BigQuery.Dataset("dataset", new()
    {
        DatasetId = "example_dataset",
        FriendlyName = "test",
        Description = "This is a test description",
        Location = "US",
        ExternalCatalogDatasetOptions = new Gcp.BigQuery.Inputs.DatasetExternalCatalogDatasetOptionsArgs
        {
            Parameters = 
            {
                { "dataset_owner", "test_dataset_owner" },
            },
            DefaultStorageLocationUri = "gs://test_dataset/tables",
        },
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.bigquery.inputs.DatasetExternalCatalogDatasetOptionsArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var dataset = new Dataset("dataset", DatasetArgs.builder()
            .datasetId("example_dataset")
            .friendlyName("test")
            .description("This is a test description")
            .location("US")
            .externalCatalogDatasetOptions(DatasetExternalCatalogDatasetOptionsArgs.builder()
                .parameters(Map.of("dataset_owner", "test_dataset_owner"))
                .defaultStorageLocationUri("gs://test_dataset/tables")
                .build())
            .build());
    }
}
resources:
  dataset:
    type: gcp:bigquery:Dataset
    properties:
      datasetId: example_dataset
      friendlyName: test
      description: This is a test description
      location: US
      externalCatalogDatasetOptions:
        parameters:
          dataset_owner: test_dataset_owner
        defaultStorageLocationUri: gs://test_dataset/tables
Create Dataset Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new Dataset(name: string, args: DatasetArgs, opts?: CustomResourceOptions);@overload
def Dataset(resource_name: str,
            args: DatasetArgs,
            opts: Optional[ResourceOptions] = None)
@overload
def Dataset(resource_name: str,
            opts: Optional[ResourceOptions] = None,
            dataset_id: Optional[str] = None,
            external_catalog_dataset_options: Optional[DatasetExternalCatalogDatasetOptionsArgs] = None,
            default_table_expiration_ms: Optional[int] = None,
            external_dataset_reference: Optional[DatasetExternalDatasetReferenceArgs] = None,
            default_partition_expiration_ms: Optional[int] = None,
            friendly_name: Optional[str] = None,
            delete_contents_on_destroy: Optional[bool] = None,
            description: Optional[str] = None,
            is_case_insensitive: Optional[bool] = None,
            default_encryption_configuration: Optional[DatasetDefaultEncryptionConfigurationArgs] = None,
            default_collation: Optional[str] = None,
            accesses: Optional[Sequence[DatasetAccessArgs]] = None,
            labels: Optional[Mapping[str, str]] = None,
            location: Optional[str] = None,
            max_time_travel_hours: Optional[str] = None,
            project: Optional[str] = None,
            resource_tags: Optional[Mapping[str, str]] = None,
            storage_billing_model: Optional[str] = None)func NewDataset(ctx *Context, name string, args DatasetArgs, opts ...ResourceOption) (*Dataset, error)public Dataset(string name, DatasetArgs args, CustomResourceOptions? opts = null)
public Dataset(String name, DatasetArgs args)
public Dataset(String name, DatasetArgs args, CustomResourceOptions options)
type: gcp:bigquery:Dataset
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args DatasetArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args DatasetArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args DatasetArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args DatasetArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args DatasetArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var datasetResource = new Gcp.BigQuery.Dataset("datasetResource", new()
{
    DatasetId = "string",
    ExternalCatalogDatasetOptions = new Gcp.BigQuery.Inputs.DatasetExternalCatalogDatasetOptionsArgs
    {
        DefaultStorageLocationUri = "string",
        Parameters = 
        {
            { "string", "string" },
        },
    },
    DefaultTableExpirationMs = 0,
    ExternalDatasetReference = new Gcp.BigQuery.Inputs.DatasetExternalDatasetReferenceArgs
    {
        Connection = "string",
        ExternalSource = "string",
    },
    DefaultPartitionExpirationMs = 0,
    FriendlyName = "string",
    DeleteContentsOnDestroy = false,
    Description = "string",
    IsCaseInsensitive = false,
    DefaultEncryptionConfiguration = new Gcp.BigQuery.Inputs.DatasetDefaultEncryptionConfigurationArgs
    {
        KmsKeyName = "string",
    },
    DefaultCollation = "string",
    Accesses = new[]
    {
        new Gcp.BigQuery.Inputs.DatasetAccessArgs
        {
            Condition = new Gcp.BigQuery.Inputs.DatasetAccessConditionArgs
            {
                Expression = "string",
                Description = "string",
                Location = "string",
                Title = "string",
            },
            Dataset = new Gcp.BigQuery.Inputs.DatasetAccessDatasetArgs
            {
                Dataset = new Gcp.BigQuery.Inputs.DatasetAccessDatasetDatasetArgs
                {
                    DatasetId = "string",
                    ProjectId = "string",
                },
                TargetTypes = new[]
                {
                    "string",
                },
            },
            Domain = "string",
            GroupByEmail = "string",
            IamMember = "string",
            Role = "string",
            Routine = new Gcp.BigQuery.Inputs.DatasetAccessRoutineArgs
            {
                DatasetId = "string",
                ProjectId = "string",
                RoutineId = "string",
            },
            SpecialGroup = "string",
            UserByEmail = "string",
            View = new Gcp.BigQuery.Inputs.DatasetAccessViewArgs
            {
                DatasetId = "string",
                ProjectId = "string",
                TableId = "string",
            },
        },
    },
    Labels = 
    {
        { "string", "string" },
    },
    Location = "string",
    MaxTimeTravelHours = "string",
    Project = "string",
    ResourceTags = 
    {
        { "string", "string" },
    },
    StorageBillingModel = "string",
});
example, err := bigquery.NewDataset(ctx, "datasetResource", &bigquery.DatasetArgs{
	DatasetId: pulumi.String("string"),
	ExternalCatalogDatasetOptions: &bigquery.DatasetExternalCatalogDatasetOptionsArgs{
		DefaultStorageLocationUri: pulumi.String("string"),
		Parameters: pulumi.StringMap{
			"string": pulumi.String("string"),
		},
	},
	DefaultTableExpirationMs: pulumi.Int(0),
	ExternalDatasetReference: &bigquery.DatasetExternalDatasetReferenceArgs{
		Connection:     pulumi.String("string"),
		ExternalSource: pulumi.String("string"),
	},
	DefaultPartitionExpirationMs: pulumi.Int(0),
	FriendlyName:                 pulumi.String("string"),
	DeleteContentsOnDestroy:      pulumi.Bool(false),
	Description:                  pulumi.String("string"),
	IsCaseInsensitive:            pulumi.Bool(false),
	DefaultEncryptionConfiguration: &bigquery.DatasetDefaultEncryptionConfigurationArgs{
		KmsKeyName: pulumi.String("string"),
	},
	DefaultCollation: pulumi.String("string"),
	Accesses: bigquery.DatasetAccessTypeArray{
		&bigquery.DatasetAccessTypeArgs{
			Condition: &bigquery.DatasetAccessConditionArgs{
				Expression:  pulumi.String("string"),
				Description: pulumi.String("string"),
				Location:    pulumi.String("string"),
				Title:       pulumi.String("string"),
			},
			Dataset: &bigquery.DatasetAccessDatasetArgs{
				Dataset: &bigquery.DatasetAccessDatasetDatasetArgs{
					DatasetId: pulumi.String("string"),
					ProjectId: pulumi.String("string"),
				},
				TargetTypes: pulumi.StringArray{
					pulumi.String("string"),
				},
			},
			Domain:       pulumi.String("string"),
			GroupByEmail: pulumi.String("string"),
			IamMember:    pulumi.String("string"),
			Role:         pulumi.String("string"),
			Routine: &bigquery.DatasetAccessRoutineArgs{
				DatasetId: pulumi.String("string"),
				ProjectId: pulumi.String("string"),
				RoutineId: pulumi.String("string"),
			},
			SpecialGroup: pulumi.String("string"),
			UserByEmail:  pulumi.String("string"),
			View: &bigquery.DatasetAccessViewArgs{
				DatasetId: pulumi.String("string"),
				ProjectId: pulumi.String("string"),
				TableId:   pulumi.String("string"),
			},
		},
	},
	Labels: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	Location:           pulumi.String("string"),
	MaxTimeTravelHours: pulumi.String("string"),
	Project:            pulumi.String("string"),
	ResourceTags: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	StorageBillingModel: pulumi.String("string"),
})
var datasetResource = new Dataset("datasetResource", DatasetArgs.builder()
    .datasetId("string")
    .externalCatalogDatasetOptions(DatasetExternalCatalogDatasetOptionsArgs.builder()
        .defaultStorageLocationUri("string")
        .parameters(Map.of("string", "string"))
        .build())
    .defaultTableExpirationMs(0)
    .externalDatasetReference(DatasetExternalDatasetReferenceArgs.builder()
        .connection("string")
        .externalSource("string")
        .build())
    .defaultPartitionExpirationMs(0)
    .friendlyName("string")
    .deleteContentsOnDestroy(false)
    .description("string")
    .isCaseInsensitive(false)
    .defaultEncryptionConfiguration(DatasetDefaultEncryptionConfigurationArgs.builder()
        .kmsKeyName("string")
        .build())
    .defaultCollation("string")
    .accesses(DatasetAccessArgs.builder()
        .condition(DatasetAccessConditionArgs.builder()
            .expression("string")
            .description("string")
            .location("string")
            .title("string")
            .build())
        .dataset(DatasetAccessDatasetArgs.builder()
            .dataset(DatasetAccessDatasetDatasetArgs.builder()
                .datasetId("string")
                .projectId("string")
                .build())
            .targetTypes("string")
            .build())
        .domain("string")
        .groupByEmail("string")
        .iamMember("string")
        .role("string")
        .routine(DatasetAccessRoutineArgs.builder()
            .datasetId("string")
            .projectId("string")
            .routineId("string")
            .build())
        .specialGroup("string")
        .userByEmail("string")
        .view(DatasetAccessViewArgs.builder()
            .datasetId("string")
            .projectId("string")
            .tableId("string")
            .build())
        .build())
    .labels(Map.of("string", "string"))
    .location("string")
    .maxTimeTravelHours("string")
    .project("string")
    .resourceTags(Map.of("string", "string"))
    .storageBillingModel("string")
    .build());
dataset_resource = gcp.bigquery.Dataset("datasetResource",
    dataset_id="string",
    external_catalog_dataset_options={
        "default_storage_location_uri": "string",
        "parameters": {
            "string": "string",
        },
    },
    default_table_expiration_ms=0,
    external_dataset_reference={
        "connection": "string",
        "external_source": "string",
    },
    default_partition_expiration_ms=0,
    friendly_name="string",
    delete_contents_on_destroy=False,
    description="string",
    is_case_insensitive=False,
    default_encryption_configuration={
        "kms_key_name": "string",
    },
    default_collation="string",
    accesses=[{
        "condition": {
            "expression": "string",
            "description": "string",
            "location": "string",
            "title": "string",
        },
        "dataset": {
            "dataset": {
                "dataset_id": "string",
                "project_id": "string",
            },
            "target_types": ["string"],
        },
        "domain": "string",
        "group_by_email": "string",
        "iam_member": "string",
        "role": "string",
        "routine": {
            "dataset_id": "string",
            "project_id": "string",
            "routine_id": "string",
        },
        "special_group": "string",
        "user_by_email": "string",
        "view": {
            "dataset_id": "string",
            "project_id": "string",
            "table_id": "string",
        },
    }],
    labels={
        "string": "string",
    },
    location="string",
    max_time_travel_hours="string",
    project="string",
    resource_tags={
        "string": "string",
    },
    storage_billing_model="string")
const datasetResource = new gcp.bigquery.Dataset("datasetResource", {
    datasetId: "string",
    externalCatalogDatasetOptions: {
        defaultStorageLocationUri: "string",
        parameters: {
            string: "string",
        },
    },
    defaultTableExpirationMs: 0,
    externalDatasetReference: {
        connection: "string",
        externalSource: "string",
    },
    defaultPartitionExpirationMs: 0,
    friendlyName: "string",
    deleteContentsOnDestroy: false,
    description: "string",
    isCaseInsensitive: false,
    defaultEncryptionConfiguration: {
        kmsKeyName: "string",
    },
    defaultCollation: "string",
    accesses: [{
        condition: {
            expression: "string",
            description: "string",
            location: "string",
            title: "string",
        },
        dataset: {
            dataset: {
                datasetId: "string",
                projectId: "string",
            },
            targetTypes: ["string"],
        },
        domain: "string",
        groupByEmail: "string",
        iamMember: "string",
        role: "string",
        routine: {
            datasetId: "string",
            projectId: "string",
            routineId: "string",
        },
        specialGroup: "string",
        userByEmail: "string",
        view: {
            datasetId: "string",
            projectId: "string",
            tableId: "string",
        },
    }],
    labels: {
        string: "string",
    },
    location: "string",
    maxTimeTravelHours: "string",
    project: "string",
    resourceTags: {
        string: "string",
    },
    storageBillingModel: "string",
});
type: gcp:bigquery:Dataset
properties:
    accesses:
        - condition:
            description: string
            expression: string
            location: string
            title: string
          dataset:
            dataset:
                datasetId: string
                projectId: string
            targetTypes:
                - string
          domain: string
          groupByEmail: string
          iamMember: string
          role: string
          routine:
            datasetId: string
            projectId: string
            routineId: string
          specialGroup: string
          userByEmail: string
          view:
            datasetId: string
            projectId: string
            tableId: string
    datasetId: string
    defaultCollation: string
    defaultEncryptionConfiguration:
        kmsKeyName: string
    defaultPartitionExpirationMs: 0
    defaultTableExpirationMs: 0
    deleteContentsOnDestroy: false
    description: string
    externalCatalogDatasetOptions:
        defaultStorageLocationUri: string
        parameters:
            string: string
    externalDatasetReference:
        connection: string
        externalSource: string
    friendlyName: string
    isCaseInsensitive: false
    labels:
        string: string
    location: string
    maxTimeTravelHours: string
    project: string
    resourceTags:
        string: string
    storageBillingModel: string
Dataset Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The Dataset resource accepts the following input properties:
- DatasetId string
- A unique ID for this dataset, without the project name. The ID
must contain only letters (a-z, A-Z), numbers (0-9), or
underscores (_). The maximum length is 1,024 characters.
- Accesses List<DatasetAccess>
- An array of objects that define dataset access for one or more entities. Structure is documented below.
- DefaultCollation string
- Defines the default collation specification of future tables created
in the dataset. If a table is created in this dataset without table-level
default collation, then the table inherits the dataset default collation,
which is applied to the string fields that do not have explicit collation
specified. A change to this field affects only tables created afterwards,
and does not alter the existing tables.
The following values are supported: - 'und:ci': undetermined locale, case insensitive.
- '': empty string. Default to case-sensitive behavior.
 
- DefaultEncryptionConfiguration DatasetDefaultEncryptionConfiguration
- The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.
- DefaultPartitionExpirationMs int
- The default partition expiration for all partitioned tables in
the dataset, in milliseconds.
Once this property is set, all newly-created partitioned tables in
the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of defaultTableExpirationMs for partitioned tables: only one of defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned table. If you provide an explicit timePartitioning.expirationMs when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.
- DefaultTableExpirationMs int
- The default lifetime of all tables in the dataset, in milliseconds.
The minimum value is 3600000 milliseconds (one hour).
Once this property is set, all newly-created tables in the dataset
will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property.
- DeleteContentsOnDestroy bool
- If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
- Description string
- A user-friendly description of the dataset
- ExternalCatalogDatasetOptions DatasetExternalCatalogDatasetOptions
- Options defining open source compatible datasets living in the BigQuery catalog. Contains metadata of open source database, schema or namespace represented by the current dataset. Structure is documented below.
- ExternalDatasetReference DatasetExternalDatasetReference
- Information about the external metadata storage where the dataset is defined. Structure is documented below.
- FriendlyName string
- A descriptive name for the dataset
- IsCaseInsensitive bool
- TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
- Labels Dictionary<string, string>
- The labels associated with this dataset. You can use these to organize and group your datasets. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- Location string
- The geographic location where the dataset should reside.
See official docs.
There are two types of locations, regional or multi-regional. A regional
location is a specific geographic place, such as Tokyo, and a multi-regional
location is a large geographic area, such as the United States, that
contains at least two geographic places.
The default value is multi-regional location US. Changing this forces a new resource to be created.
- MaxTimeTravelHours string
- Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
- Project string
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- ResourceTags Dictionary<string, string>
- The tags attached to this table. Tag keys are globally unique. Tag key is expected to be in the namespaced format, for example "123456789012/environment" where 123456789012 is the ID of the parent organization or project resource for this tag key. Tag value is expected to be the short name, for example "Production". See Tag definitions for more details.
- StorageBillingModel string
- Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
- DatasetId string
- A unique ID for this dataset, without the project name. The ID
must contain only letters (a-z, A-Z), numbers (0-9), or
underscores (_). The maximum length is 1,024 characters.
- Accesses []DatasetAccessTypeArgs
- An array of objects that define dataset access for one or more entities. Structure is documented below.
- DefaultCollation string
- Defines the default collation specification of future tables created
in the dataset. If a table is created in this dataset without table-level
default collation, then the table inherits the dataset default collation,
which is applied to the string fields that do not have explicit collation
specified. A change to this field affects only tables created afterwards,
and does not alter the existing tables.
The following values are supported: - 'und:ci': undetermined locale, case insensitive.
- '': empty string. Default to case-sensitive behavior.
 
- DefaultEncryptionConfiguration DatasetDefaultEncryptionConfigurationArgs
- The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.
- DefaultPartitionExpirationMs int
- The default partition expiration for all partitioned tables in
the dataset, in milliseconds.
Once this property is set, all newly-created partitioned tables in
the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of defaultTableExpirationMs for partitioned tables: only one of defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned table. If you provide an explicit timePartitioning.expirationMs when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.
- DefaultTableExpirationMs int
- The default lifetime of all tables in the dataset, in milliseconds.
The minimum value is 3600000 milliseconds (one hour).
Once this property is set, all newly-created tables in the dataset
will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property.
- DeleteContentsOnDestroy bool
- If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
- Description string
- A user-friendly description of the dataset
- ExternalCatalogDatasetOptions DatasetExternalCatalogDatasetOptionsArgs
- Options defining open source compatible datasets living in the BigQuery catalog. Contains metadata of open source database, schema or namespace represented by the current dataset. Structure is documented below.
- ExternalDatasetReference DatasetExternalDatasetReferenceArgs
- Information about the external metadata storage where the dataset is defined. Structure is documented below.
- FriendlyName string
- A descriptive name for the dataset
- IsCaseInsensitive bool
- TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
- Labels map[string]string
- The labels associated with this dataset. You can use these to organize and group your datasets. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- Location string
- The geographic location where the dataset should reside.
See official docs.
There are two types of locations, regional or multi-regional. A regional
location is a specific geographic place, such as Tokyo, and a multi-regional
location is a large geographic area, such as the United States, that
contains at least two geographic places.
The default value is multi-regional location US. Changing this forces a new resource to be created.
- MaxTimeTravelHours string
- Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
- Project string
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- ResourceTags map[string]string
- The tags attached to this table. Tag keys are globally unique. Tag key is expected to be in the namespaced format, for example "123456789012/environment" where 123456789012 is the ID of the parent organization or project resource for this tag key. Tag value is expected to be the short name, for example "Production". See Tag definitions for more details.
- StorageBillingModel string
- Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
- datasetId String
- A unique ID for this dataset, without the project name. The ID
must contain only letters (a-z, A-Z), numbers (0-9), or
underscores (_). The maximum length is 1,024 characters.
- accesses
List<DatasetAccess> 
- An array of objects that define dataset access for one or more entities. Structure is documented below.
- defaultCollation String
- Defines the default collation specification of future tables created
in the dataset. If a table is created in this dataset without table-level
default collation, then the table inherits the dataset default collation,
which is applied to the string fields that do not have explicit collation
specified. A change to this field affects only tables created afterwards,
and does not alter the existing tables.
The following values are supported:
- 'und:ci': undetermined locale, case insensitive.
- '': empty string. Default to case-sensitive behavior.
 
- defaultEncryptionConfiguration DatasetDefaultEncryptionConfiguration
- The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.
- defaultPartitionExpirationMs Integer
- The default partition expiration for all partitioned tables in
the dataset, in milliseconds.
Once this property is set, all newly-created partitioned tables in
the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of defaultTableExpirationMs for partitioned tables: only one of defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned table. If you provide an explicit timePartitioning.expirationMs when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.
- defaultTableExpirationMs Integer
- The default lifetime of all tables in the dataset, in milliseconds.
The minimum value is 3600000 milliseconds (one hour).
Once this property is set, all newly-created tables in the dataset
will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property.
- deleteContentsOnDestroy Boolean
- If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
- description String
- A user-friendly description of the dataset
- externalCatalogDatasetOptions DatasetExternalCatalogDatasetOptions
- Options defining open source compatible datasets living in the BigQuery catalog. Contains metadata of open source database, schema or namespace represented by the current dataset. Structure is documented below.
- externalDatasetReference DatasetExternalDatasetReference
- Information about the external metadata storage where the dataset is defined. Structure is documented below.
- friendlyName String
- A descriptive name for the dataset
- isCaseInsensitive Boolean
- TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
- labels Map<String,String>
- The labels associated with this dataset. You can use these to organize and group your datasets. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- location String
- The geographic location where the dataset should reside.
See official docs.
There are two types of locations, regional or multi-regional. A regional
location is a specific geographic place, such as Tokyo, and a multi-regional
location is a large geographic area, such as the United States, that
contains at least two geographic places.
The default value is multi-regional location US. Changing this forces a new resource to be created.
- maxTimeTravelHours String
- Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
- project String
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- resourceTags Map<String,String>
- The tags attached to this table. Tag keys are globally unique. Tag key is expected to be in the namespaced format, for example "123456789012/environment" where 123456789012 is the ID of the parent organization or project resource for this tag key. Tag value is expected to be the short name, for example "Production". See Tag definitions for more details.
- storageBillingModel String
- Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
- datasetId string
- A unique ID for this dataset, without the project name. The ID
must contain only letters (a-z, A-Z), numbers (0-9), or
underscores (_). The maximum length is 1,024 characters.
- accesses
DatasetAccess[] 
- An array of objects that define dataset access for one or more entities. Structure is documented below.
- defaultCollation string
- Defines the default collation specification of future tables created
in the dataset. If a table is created in this dataset without table-level
default collation, then the table inherits the dataset default collation,
which is applied to the string fields that do not have explicit collation
specified. A change to this field affects only tables created afterwards,
and does not alter the existing tables.
The following values are supported:
- 'und:ci': undetermined locale, case insensitive.
- '': empty string. Default to case-sensitive behavior.
 
- defaultEncryptionConfiguration DatasetDefaultEncryptionConfiguration
- The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.
- defaultPartitionExpirationMs number
- The default partition expiration for all partitioned tables in
the dataset, in milliseconds.
Once this property is set, all newly-created partitioned tables in
the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of defaultTableExpirationMs for partitioned tables: only one of defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned table. If you provide an explicit timePartitioning.expirationMs when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.
- defaultTableExpirationMs number
- The default lifetime of all tables in the dataset, in milliseconds.
The minimum value is 3600000 milliseconds (one hour).
Once this property is set, all newly-created tables in the dataset
will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property.
- deleteContentsOnDestroy boolean
- If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
- description string
- A user-friendly description of the dataset
- externalCatalogDatasetOptions DatasetExternalCatalogDatasetOptions
- Options defining open source compatible datasets living in the BigQuery catalog. Contains metadata of open source database, schema or namespace represented by the current dataset. Structure is documented below.
- externalDatasetReference DatasetExternalDatasetReference
- Information about the external metadata storage where the dataset is defined. Structure is documented below.
- friendlyName string
- A descriptive name for the dataset
- isCaseInsensitive boolean
- TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
- labels {[key: string]: string}
- The labels associated with this dataset. You can use these to organize and group your datasets. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- location string
- The geographic location where the dataset should reside.
See official docs.
There are two types of locations, regional or multi-regional. A regional
location is a specific geographic place, such as Tokyo, and a multi-regional
location is a large geographic area, such as the United States, that
contains at least two geographic places.
The default value is multi-regional location US. Changing this forces a new resource to be created.
- maxTimeTravelHours string
- Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
- project string
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- resourceTags {[key: string]: string}
- The tags attached to this table. Tag keys are globally unique. Tag key is expected to be in the namespaced format, for example "123456789012/environment" where 123456789012 is the ID of the parent organization or project resource for this tag key. Tag value is expected to be the short name, for example "Production". See Tag definitions for more details.
- storageBillingModel string
- Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
- dataset_id str
- A unique ID for this dataset, without the project name. The ID
must contain only letters (a-z, A-Z), numbers (0-9), or
underscores (_). The maximum length is 1,024 characters.
- accesses
Sequence[DatasetAccessArgs]
- An array of objects that define dataset access for one or more entities. Structure is documented below.
- default_collation str
- Defines the default collation specification of future tables created
in the dataset. If a table is created in this dataset without table-level
default collation, then the table inherits the dataset default collation,
which is applied to the string fields that do not have explicit collation
specified. A change to this field affects only tables created afterwards,
and does not alter the existing tables.
The following values are supported:
- 'und:ci': undetermined locale, case insensitive.
- '': empty string. Default to case-sensitive behavior.
 
- default_encryption_configuration DatasetDefaultEncryptionConfigurationArgs
- The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.
- default_partition_expiration_ms int
- The default partition expiration for all partitioned tables in
the dataset, in milliseconds.
Once this property is set, all newly-created partitioned tables in
the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of defaultTableExpirationMs for partitioned tables: only one of defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned table. If you provide an explicit timePartitioning.expirationMs when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.
- default_table_expiration_ms int
- The default lifetime of all tables in the dataset, in milliseconds.
The minimum value is 3600000 milliseconds (one hour).
Once this property is set, all newly-created tables in the dataset
will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property.
- delete_contents_on_destroy bool
- If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
- description str
- A user-friendly description of the dataset
- external_catalog_dataset_options DatasetExternalCatalogDatasetOptionsArgs
- Options defining open source compatible datasets living in the BigQuery catalog. Contains metadata of open source database, schema or namespace represented by the current dataset. Structure is documented below.
- external_dataset_reference DatasetExternalDatasetReferenceArgs
- Information about the external metadata storage where the dataset is defined. Structure is documented below.
- friendly_name str
- A descriptive name for the dataset
- is_case_insensitive bool
- TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
- labels Mapping[str, str]
- The labels associated with this dataset. You can use these to organize and group your datasets. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- location str
- The geographic location where the dataset should reside.
See official docs.
There are two types of locations, regional or multi-regional. A regional
location is a specific geographic place, such as Tokyo, and a multi-regional
location is a large geographic area, such as the United States, that
contains at least two geographic places.
The default value is multi-regional location US. Changing this forces a new resource to be created.
- max_time_travel_hours str
- Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
- project str
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- resource_tags Mapping[str, str]
- The tags attached to this table. Tag keys are globally unique. Tag key is expected to be in the namespaced format, for example "123456789012/environment" where 123456789012 is the ID of the parent organization or project resource for this tag key. Tag value is expected to be the short name, for example "Production". See Tag definitions for more details.
- storage_billing_model str
- Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
- datasetId String
- A unique ID for this dataset, without the project name. The ID
must contain only letters (a-z, A-Z), numbers (0-9), or
underscores (_). The maximum length is 1,024 characters.
- accesses List<Property Map>
- An array of objects that define dataset access for one or more entities. Structure is documented below.
- defaultCollation String
- Defines the default collation specification of future tables created
in the dataset. If a table is created in this dataset without table-level
default collation, then the table inherits the dataset default collation,
which is applied to the string fields that do not have explicit collation
specified. A change to this field affects only tables created afterwards,
and does not alter the existing tables.
The following values are supported:
- 'und:ci': undetermined locale, case insensitive.
- '': empty string. Default to case-sensitive behavior.
 
- defaultEncryptionConfiguration Property Map
- The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.
- defaultPartitionExpirationMs Number
- The default partition expiration for all partitioned tables in
the dataset, in milliseconds.
Once this property is set, all newly-created partitioned tables in
the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of defaultTableExpirationMs for partitioned tables: only one of defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned table. If you provide an explicit timePartitioning.expirationMs when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.
- defaultTableExpirationMs Number
- The default lifetime of all tables in the dataset, in milliseconds.
The minimum value is 3600000 milliseconds (one hour).
Once this property is set, all newly-created tables in the dataset
will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property.
- deleteContentsOnDestroy Boolean
- If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
- description String
- A user-friendly description of the dataset
- externalCatalogDatasetOptions Property Map
- Options defining open source compatible datasets living in the BigQuery catalog. Contains metadata of open source database, schema or namespace represented by the current dataset. Structure is documented below.
- externalDatasetReference Property Map
- Information about the external metadata storage where the dataset is defined. Structure is documented below.
- friendlyName String
- A descriptive name for the dataset
- isCaseInsensitive Boolean
- TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
- labels Map<String>
- The labels associated with this dataset. You can use these to organize and group your datasets. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- location String
- The geographic location where the dataset should reside.
See official docs.
There are two types of locations, regional or multi-regional. A regional
location is a specific geographic place, such as Tokyo, and a multi-regional
location is a large geographic area, such as the United States, that
contains at least two geographic places.
The default value is multi-regional location US. Changing this forces a new resource to be created.
- maxTimeTravelHours String
- Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
- project String
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- resourceTags Map<String>
- The tags attached to this table. Tag keys are globally unique. Tag key is expected to be in the namespaced format, for example "123456789012/environment" where 123456789012 is the ID of the parent organization or project resource for this tag key. Tag value is expected to be the short name, for example "Production". See Tag definitions for more details.
- storageBillingModel String
- Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
Outputs
All input properties are implicitly available as output properties. Additionally, the Dataset resource produces the following output properties:
- CreationTime int
- The time when this dataset was created, in milliseconds since the epoch.
- EffectiveLabels Dictionary<string, string>
- All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- Etag string
- A hash of the resource.
- Id string
- The provider-assigned unique ID for this managed resource.
- LastModifiedTime int
- The date when this dataset or any of its tables was last modified, in milliseconds since the epoch.
- PulumiLabels Dictionary<string, string>
- The combination of labels configured directly on the resource and default labels configured on the provider.
- SelfLink string
- The URI of the created resource.
- CreationTime int
- The time when this dataset was created, in milliseconds since the epoch.
- EffectiveLabels map[string]string
- All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- Etag string
- A hash of the resource.
- Id string
- The provider-assigned unique ID for this managed resource.
- LastModifiedTime int
- The date when this dataset or any of its tables was last modified, in milliseconds since the epoch.
- PulumiLabels map[string]string
- The combination of labels configured directly on the resource and default labels configured on the provider.
- SelfLink string
- The URI of the created resource.
- creationTime Integer
- The time when this dataset was created, in milliseconds since the epoch.
- effectiveLabels Map<String,String>
- All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- etag String
- A hash of the resource.
- id String
- The provider-assigned unique ID for this managed resource.
- lastModifiedTime Integer
- The date when this dataset or any of its tables was last modified, in milliseconds since the epoch.
- pulumiLabels Map<String,String>
- The combination of labels configured directly on the resource and default labels configured on the provider.
- selfLink String
- The URI of the created resource.
- creationTime number
- The time when this dataset was created, in milliseconds since the epoch.
- effectiveLabels {[key: string]: string}
- All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- etag string
- A hash of the resource.
- id string
- The provider-assigned unique ID for this managed resource.
- lastModifiedTime number
- The date when this dataset or any of its tables was last modified, in milliseconds since the epoch.
- pulumiLabels {[key: string]: string}
- The combination of labels configured directly on the resource and default labels configured on the provider.
- selfLink string
- The URI of the created resource.
- creation_time int
- The time when this dataset was created, in milliseconds since the epoch.
- effective_labels Mapping[str, str]
- All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- etag str
- A hash of the resource.
- id str
- The provider-assigned unique ID for this managed resource.
- last_modified_time int
- The date when this dataset or any of its tables was last modified, in milliseconds since the epoch.
- pulumi_labels Mapping[str, str]
- The combination of labels configured directly on the resource and default labels configured on the provider.
- self_link str
- The URI of the created resource.
- creationTime Number
- The time when this dataset was created, in milliseconds since the epoch.
- effectiveLabels Map<String>
- All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- etag String
- A hash of the resource.
- id String
- The provider-assigned unique ID for this managed resource.
- lastModifiedTime Number
- The date when this dataset or any of its tables was last modified, in milliseconds since the epoch.
- pulumiLabels Map<String>
- The combination of labels configured directly on the resource and default labels configured on the provider.
- selfLink String
- The URI of the created resource.
Look up Existing Dataset Resource
Get an existing Dataset resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: DatasetState, opts?: CustomResourceOptions): Dataset
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        accesses: Optional[Sequence[DatasetAccessArgs]] = None,
        creation_time: Optional[int] = None,
        dataset_id: Optional[str] = None,
        default_collation: Optional[str] = None,
        default_encryption_configuration: Optional[DatasetDefaultEncryptionConfigurationArgs] = None,
        default_partition_expiration_ms: Optional[int] = None,
        default_table_expiration_ms: Optional[int] = None,
        delete_contents_on_destroy: Optional[bool] = None,
        description: Optional[str] = None,
        effective_labels: Optional[Mapping[str, str]] = None,
        etag: Optional[str] = None,
        external_catalog_dataset_options: Optional[DatasetExternalCatalogDatasetOptionsArgs] = None,
        external_dataset_reference: Optional[DatasetExternalDatasetReferenceArgs] = None,
        friendly_name: Optional[str] = None,
        is_case_insensitive: Optional[bool] = None,
        labels: Optional[Mapping[str, str]] = None,
        last_modified_time: Optional[int] = None,
        location: Optional[str] = None,
        max_time_travel_hours: Optional[str] = None,
        project: Optional[str] = None,
        pulumi_labels: Optional[Mapping[str, str]] = None,
        resource_tags: Optional[Mapping[str, str]] = None,
        self_link: Optional[str] = None,
        storage_billing_model: Optional[str] = None) -> Dataset
func GetDataset(ctx *Context, name string, id IDInput, state *DatasetState, opts ...ResourceOption) (*Dataset, error)
public static Dataset Get(string name, Input<string> id, DatasetState? state, CustomResourceOptions? opts = null)
public static Dataset get(String name, Output<String> id, DatasetState state, CustomResourceOptions options)
resources:
  _:
    type: gcp:bigquery:Dataset
    get:
      id: ${id}
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- Accesses
List<DatasetAccess> 
- An array of objects that define dataset access for one or more entities. Structure is documented below.
- CreationTime int
- The time when this dataset was created, in milliseconds since the epoch.
- DatasetId string
- A unique ID for this dataset, without the project name. The ID
must contain only letters (a-z, A-Z), numbers (0-9), or
underscores (_). The maximum length is 1,024 characters.
- DefaultCollation string
- Defines the default collation specification of future tables created
in the dataset. If a table is created in this dataset without table-level
default collation, then the table inherits the dataset default collation,
which is applied to the string fields that do not have explicit collation
specified. A change to this field affects only tables created afterwards,
and does not alter the existing tables.
The following values are supported:- 'und:ci': undetermined locale, case insensitive.
- '': empty string. Default to case-sensitive behavior.
 
- DefaultEncryptionConfiguration DatasetDefaultEncryptionConfiguration
- The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.
- DefaultPartitionExpirationMs int
- The default partition expiration for all partitioned tables in
the dataset, in milliseconds.
Once this property is set, all newly-created partitioned tables in
the dataset will have an `expirationMs` property in the `timePartitioning` settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of `defaultTableExpirationMs` for partitioned tables: only one of `defaultTableExpirationMs` and `defaultPartitionExpirationMs` will be used for any new partitioned table. If you provide an explicit `timePartitioning.expirationMs` when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.
- DefaultTableExpirationMs int
- The default lifetime of all tables in the dataset, in milliseconds.
The minimum value is 3600000 milliseconds (one hour).
Once this property is set, all newly-created tables in the dataset
will have an `expirationTime` property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the `expirationTime` for a given table is reached, that table will be deleted automatically. If a table's `expirationTime` is modified or removed before the table expires, or if you provide an explicit `expirationTime` when creating a table, that value takes precedence over the default expiration time indicated by this property.
- DeleteContentsOnDestroy bool
- If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
- Description string
- A user-friendly description of the dataset
- EffectiveLabels Dictionary<string, string>
- All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- Etag string
- A hash of the resource.
- ExternalCatalogDatasetOptions DatasetExternalCatalogDatasetOptions
- Options defining open source compatible datasets living in the BigQuery catalog. Contains metadata of open source database, schema or namespace represented by the current dataset. Structure is documented below.
- ExternalDatasetReference DatasetExternalDatasetReference
- Information about the external metadata storage where the dataset is defined. Structure is documented below.
- FriendlyName string
- A descriptive name for the dataset
- IsCaseInsensitive bool
- TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
- Labels Dictionary<string, string>
- The labels associated with this dataset. You can use these to organize and group your datasets. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field `effective_labels` for all of the labels present on the resource.
- LastModifiedTime int
- The date when this dataset or any of its tables was last modified, in milliseconds since the epoch.
- Location string
- The geographic location where the dataset should reside.
See official docs.
There are two types of locations, regional or multi-regional. A regional
location is a specific geographic place, such as Tokyo, and a multi-regional
location is a large geographic area, such as the United States, that
contains at least two geographic places.
The default value is multi-regional location US. Changing this forces a new resource to be created.
- MaxTimeTravelHours string
- Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
- Project string
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- PulumiLabels Dictionary<string, string>
- The combination of labels configured directly on the resource and default labels configured on the provider.
- ResourceTags Dictionary<string, string>
- The tags attached to this table. Tag keys are globally unique. Tag key is expected to be in the namespaced format, for example "123456789012/environment" where 123456789012 is the ID of the parent organization or project resource for this tag key. Tag value is expected to be the short name, for example "Production". See Tag definitions for more details.
- SelfLink string
- The URI of the created resource.
- StorageBillingModel string
- Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
- Accesses
[]DatasetAccessTypeArgs
- An array of objects that define dataset access for one or more entities. Structure is documented below.
- CreationTime int
- The time when this dataset was created, in milliseconds since the epoch.
- DatasetId string
- A unique ID for this dataset, without the project name. The ID
must contain only letters (a-z, A-Z), numbers (0-9), or
underscores (_). The maximum length is 1,024 characters.
- DefaultCollation string
- Defines the default collation specification of future tables created
in the dataset. If a table is created in this dataset without table-level
default collation, then the table inherits the dataset default collation,
which is applied to the string fields that do not have explicit collation
specified. A change to this field affects only tables created afterwards,
and does not alter the existing tables.
The following values are supported:- 'und:ci': undetermined locale, case insensitive.
- '': empty string. Default to case-sensitive behavior.
 
- DefaultEncryptionConfiguration DatasetDefaultEncryptionConfigurationArgs
- The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.
- DefaultPartitionExpirationMs int
- The default partition expiration for all partitioned tables in
the dataset, in milliseconds.
Once this property is set, all newly-created partitioned tables in
the dataset will have an `expirationMs` property in the `timePartitioning` settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of `defaultTableExpirationMs` for partitioned tables: only one of `defaultTableExpirationMs` and `defaultPartitionExpirationMs` will be used for any new partitioned table. If you provide an explicit `timePartitioning.expirationMs` when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.
- DefaultTableExpirationMs int
- The default lifetime of all tables in the dataset, in milliseconds.
The minimum value is 3600000 milliseconds (one hour).
Once this property is set, all newly-created tables in the dataset
will have an `expirationTime` property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the `expirationTime` for a given table is reached, that table will be deleted automatically. If a table's `expirationTime` is modified or removed before the table expires, or if you provide an explicit `expirationTime` when creating a table, that value takes precedence over the default expiration time indicated by this property.
- DeleteContentsOnDestroy bool
- If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
- Description string
- A user-friendly description of the dataset
- EffectiveLabels map[string]string
- All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- Etag string
- A hash of the resource.
- ExternalCatalogDatasetOptions DatasetExternalCatalogDatasetOptionsArgs
- Options defining open source compatible datasets living in the BigQuery catalog. Contains metadata of open source database, schema or namespace represented by the current dataset. Structure is documented below.
- ExternalDatasetReference DatasetExternalDatasetReferenceArgs
- Information about the external metadata storage where the dataset is defined. Structure is documented below.
- FriendlyName string
- A descriptive name for the dataset
- IsCaseInsensitive bool
- TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
- Labels map[string]string
- The labels associated with this dataset. You can use these to organize and group your datasets. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field `effective_labels` for all of the labels present on the resource.
- LastModifiedTime int
- The date when this dataset or any of its tables was last modified, in milliseconds since the epoch.
- Location string
- The geographic location where the dataset should reside.
See official docs.
There are two types of locations, regional or multi-regional. A regional
location is a specific geographic place, such as Tokyo, and a multi-regional
location is a large geographic area, such as the United States, that
contains at least two geographic places.
The default value is multi-regional location US. Changing this forces a new resource to be created.
- MaxTimeTravelHours string
- Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
- Project string
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- PulumiLabels map[string]string
- The combination of labels configured directly on the resource and default labels configured on the provider.
- ResourceTags map[string]string
- The tags attached to this table. Tag keys are globally unique. Tag key is expected to be in the namespaced format, for example "123456789012/environment" where 123456789012 is the ID of the parent organization or project resource for this tag key. Tag value is expected to be the short name, for example "Production". See Tag definitions for more details.
- SelfLink string
- The URI of the created resource.
- StorageBillingModel string
- Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
- accesses
List<DatasetAccess> 
- An array of objects that define dataset access for one or more entities. Structure is documented below.
- creationTime Integer
- The time when this dataset was created, in milliseconds since the epoch.
- datasetId String
- A unique ID for this dataset, without the project name. The ID
must contain only letters (a-z, A-Z), numbers (0-9), or
underscores (_). The maximum length is 1,024 characters.
- defaultCollation String
- Defines the default collation specification of future tables created
in the dataset. If a table is created in this dataset without table-level
default collation, then the table inherits the dataset default collation,
which is applied to the string fields that do not have explicit collation
specified. A change to this field affects only tables created afterwards,
and does not alter the existing tables.
The following values are supported:- 'und:ci': undetermined locale, case insensitive.
- '': empty string. Default to case-sensitive behavior.
 
- defaultEncryptionConfiguration DatasetDefaultEncryptionConfiguration
- The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.
- defaultPartitionExpirationMs Integer
- The default partition expiration for all partitioned tables in
the dataset, in milliseconds.
Once this property is set, all newly-created partitioned tables in
the dataset will have an `expirationMs` property in the `timePartitioning` settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of `defaultTableExpirationMs` for partitioned tables: only one of `defaultTableExpirationMs` and `defaultPartitionExpirationMs` will be used for any new partitioned table. If you provide an explicit `timePartitioning.expirationMs` when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.
- defaultTableExpirationMs Integer
- The default lifetime of all tables in the dataset, in milliseconds.
The minimum value is 3600000 milliseconds (one hour).
Once this property is set, all newly-created tables in the dataset
will have an `expirationTime` property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the `expirationTime` for a given table is reached, that table will be deleted automatically. If a table's `expirationTime` is modified or removed before the table expires, or if you provide an explicit `expirationTime` when creating a table, that value takes precedence over the default expiration time indicated by this property.
- deleteContentsOnDestroy Boolean
- If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
- description String
- A user-friendly description of the dataset
- effectiveLabels Map<String,String>
- All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- etag String
- A hash of the resource.
- externalCatalogDatasetOptions DatasetExternalCatalogDatasetOptions
- Options defining open source compatible datasets living in the BigQuery catalog. Contains metadata of open source database, schema or namespace represented by the current dataset. Structure is documented below.
- externalDatasetReference DatasetExternalDatasetReference
- Information about the external metadata storage where the dataset is defined. Structure is documented below.
- friendlyName String
- A descriptive name for the dataset
- isCaseInsensitive Boolean
- TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
- labels Map<String,String>
- The labels associated with this dataset. You can use these to organize and group your datasets. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field `effective_labels` for all of the labels present on the resource.
- lastModifiedTime Integer
- The date when this dataset or any of its tables was last modified, in milliseconds since the epoch.
- location String
- The geographic location where the dataset should reside.
See official docs.
There are two types of locations, regional or multi-regional. A regional
location is a specific geographic place, such as Tokyo, and a multi-regional
location is a large geographic area, such as the United States, that
contains at least two geographic places.
The default value is multi-regional location US. Changing this forces a new resource to be created.
- maxTimeTravelHours String
- Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
- project String
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- pulumiLabels Map<String,String>
- The combination of labels configured directly on the resource and default labels configured on the provider.
- resourceTags Map<String,String>
- The tags attached to this table. Tag keys are globally unique. Tag key is expected to be in the namespaced format, for example "123456789012/environment" where 123456789012 is the ID of the parent organization or project resource for this tag key. Tag value is expected to be the short name, for example "Production". See Tag definitions for more details.
- selfLink String
- The URI of the created resource.
- storageBillingModel String
- Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
- accesses
DatasetAccess[] 
- An array of objects that define dataset access for one or more entities. Structure is documented below.
- creationTime number
- The time when this dataset was created, in milliseconds since the epoch.
- datasetId string
- A unique ID for this dataset, without the project name. The ID
must contain only letters (a-z, A-Z), numbers (0-9), or
underscores (_). The maximum length is 1,024 characters.
- defaultCollation string
- Defines the default collation specification of future tables created
in the dataset. If a table is created in this dataset without table-level
default collation, then the table inherits the dataset default collation,
which is applied to the string fields that do not have explicit collation
specified. A change to this field affects only tables created afterwards,
and does not alter the existing tables.
The following values are supported:- 'und:ci': undetermined locale, case insensitive.
- '': empty string. Default to case-sensitive behavior.
 
- defaultEncryptionConfiguration DatasetDefaultEncryptionConfiguration
- The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.
- defaultPartitionExpirationMs number
- The default partition expiration for all partitioned tables in
the dataset, in milliseconds.
Once this property is set, all newly-created partitioned tables in
the dataset will have an `expirationMs` property in the `timePartitioning` settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of `defaultTableExpirationMs` for partitioned tables: only one of `defaultTableExpirationMs` and `defaultPartitionExpirationMs` will be used for any new partitioned table. If you provide an explicit `timePartitioning.expirationMs` when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.
- defaultTableExpirationMs number
- The default lifetime of all tables in the dataset, in milliseconds.
The minimum value is 3600000 milliseconds (one hour).
Once this property is set, all newly-created tables in the dataset
will have an `expirationTime` property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the `expirationTime` for a given table is reached, that table will be deleted automatically. If a table's `expirationTime` is modified or removed before the table expires, or if you provide an explicit `expirationTime` when creating a table, that value takes precedence over the default expiration time indicated by this property.
- deleteContentsOnDestroy boolean
- If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
- description string
- A user-friendly description of the dataset
- effectiveLabels {[key: string]: string}
- All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- etag string
- A hash of the resource.
- externalCatalogDatasetOptions DatasetExternalCatalogDatasetOptions
- Options defining open source compatible datasets living in the BigQuery catalog. Contains metadata of open source database, schema or namespace represented by the current dataset. Structure is documented below.
- externalDatasetReference DatasetExternalDatasetReference
- Information about the external metadata storage where the dataset is defined. Structure is documented below.
- friendlyName string
- A descriptive name for the dataset
- isCaseInsensitive boolean
- TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
- labels {[key: string]: string}
- The labels associated with this dataset. You can use these to organize and group your datasets. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field `effective_labels` for all of the labels present on the resource.
- lastModifiedTime number
- The date when this dataset or any of its tables was last modified, in milliseconds since the epoch.
- location string
- The geographic location where the dataset should reside.
See official docs.
There are two types of locations, regional or multi-regional. A regional
location is a specific geographic place, such as Tokyo, and a multi-regional
location is a large geographic area, such as the United States, that
contains at least two geographic places.
The default value is multi-regional location US. Changing this forces a new resource to be created.
- maxTimeTravelHours string
- Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
- project string
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- pulumiLabels {[key: string]: string}
- The combination of labels configured directly on the resource and default labels configured on the provider.
- resourceTags {[key: string]: string}
- The tags attached to this table. Tag keys are globally unique. Tag key is expected to be in the namespaced format, for example "123456789012/environment" where 123456789012 is the ID of the parent organization or project resource for this tag key. Tag value is expected to be the short name, for example "Production". See Tag definitions for more details.
- selfLink string
- The URI of the created resource.
- storageBillingModel string
- Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
- accesses
Sequence[DatasetAccessArgs]
- An array of objects that define dataset access for one or more entities. Structure is documented below.
- creation_time int
- The time when this dataset was created, in milliseconds since the epoch.
- dataset_id str
- A unique ID for this dataset, without the project name. The ID
must contain only letters (a-z, A-Z), numbers (0-9), or
underscores (_). The maximum length is 1,024 characters.
- default_collation str
- Defines the default collation specification of future tables created
in the dataset. If a table is created in this dataset without table-level
default collation, then the table inherits the dataset default collation,
which is applied to the string fields that do not have explicit collation
specified. A change to this field affects only tables created afterwards,
and does not alter the existing tables.
The following values are supported:- 'und:ci': undetermined locale, case insensitive.
- '': empty string. Default to case-sensitive behavior.
 
- default_encryption_configuration DatasetDefaultEncryptionConfigurationArgs
- The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.
- default_partition_expiration_ms int
- The default partition expiration for all partitioned tables in
the dataset, in milliseconds.
Once this property is set, all newly-created partitioned tables in
the dataset will have an `expirationMs` property in the `timePartitioning` settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of `defaultTableExpirationMs` for partitioned tables: only one of `defaultTableExpirationMs` and `defaultPartitionExpirationMs` will be used for any new partitioned table. If you provide an explicit `timePartitioning.expirationMs` when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.
- default_table_expiration_ms int
- The default lifetime of all tables in the dataset, in milliseconds.
The minimum value is 3600000 milliseconds (one hour).
Once this property is set, all newly-created tables in the dataset
will have an `expirationTime` property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the `expirationTime` for a given table is reached, that table will be deleted automatically. If a table's `expirationTime` is modified or removed before the table expires, or if you provide an explicit `expirationTime` when creating a table, that value takes precedence over the default expiration time indicated by this property.
- delete_contents_on_destroy bool
- If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
- description str
- A user-friendly description of the dataset
- effective_labels Mapping[str, str]
- All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- etag str
- A hash of the resource.
- external_catalog_ Datasetdataset_ options External Catalog Dataset Options Args 
- Options defining open source compatible datasets living in the BigQuery catalog. Contains metadata of open source database, schema or namespace represented by the current dataset. Structure is documented below.
- external_dataset_ Datasetreference External Dataset Reference Args 
- Information about the external metadata storage where the dataset is defined. Structure is documented below.
- friendly_name str
- A descriptive name for the dataset
- is_case_ boolinsensitive 
- TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
- labels Mapping[str, str]
- The labels associated with this dataset. You can use these to organize and group your datasets. - Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field - effective_labelsfor all of the labels present on the resource.
- last_modified_time int
- The date when this dataset or any of its tables was last modified, in milliseconds since the epoch.
- location str
- The geographic location where the dataset should reside.
See official docs.
There are two types of locations, regional or multi-regional. A regional
location is a specific geographic place, such as Tokyo, and a multi-regional
location is a large geographic area, such as the United States, that
contains at least two geographic places.
The default value is multi-regional location US. Changing this forces a new resource to be created.
- max_time_travel_hours str
- Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
- project str
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- pulumi_labels Mapping[str, str]
- The combination of labels configured directly on the resource and default labels configured on the provider.
- resource_tags Mapping[str, str]
- The tags attached to this table. Tag keys are globally unique. Tag key is expected to be in the namespaced format, for example "123456789012/environment" where 123456789012 is the ID of the parent organization or project resource for this tag key. Tag value is expected to be the short name, for example "Production". See Tag definitions for more details.
- self_link str
- The URI of the created resource.
- storage_billing_model str
- Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
- accesses List<Property Map>
- An array of objects that define dataset access for one or more entities. Structure is documented below.
- creationTime Number
- The time when this dataset was created, in milliseconds since the epoch.
- datasetId String
- A unique ID for this dataset, without the project name. The ID
must contain only letters (a-z, A-Z), numbers (0-9), or
underscores (_). The maximum length is 1,024 characters.
- defaultCollation String
- Defines the default collation specification of future tables created
in the dataset. If a table is created in this dataset without table-level
default collation, then the table inherits the dataset default collation,
which is applied to the string fields that do not have explicit collation
specified. A change to this field affects only tables created afterwards,
and does not alter the existing tables.
The following values are supported:- 'und:ci': undetermined locale, case insensitive.
- '': empty string. Default to case-sensitive behavior.
 
- defaultEncryptionConfiguration Property Map
- The default encryption key for all tables in the dataset. Once this property is set, all newly-created partitioned tables in the dataset will have encryption key set to this value, unless table creation request (or query) overrides the key. Structure is documented below.
- defaultPartitionExpirationMs Number
- The default partition expiration for all partitioned tables in
the dataset, in milliseconds.
Once this property is set, all newly-created partitioned tables in
the dataset will have an expirationMs property in the timePartitioning settings set to this value, and changing the value will only affect new tables, not existing ones. The storage in a partition will have an expiration time of its partition time plus this value. Setting this property overrides the use of defaultTableExpirationMs for partitioned tables: only one of defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned table. If you provide an explicit timePartitioning.expirationMs when creating or updating a partitioned table, that value takes precedence over the default partition expiration time indicated by this property.
- defaultTableExpirationMs Number
- The default lifetime of all tables in the dataset, in milliseconds.
The minimum value is 3600000 milliseconds (one hour).
Once this property is set, all newly-created tables in the dataset
will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property.
- deleteContentsOnDestroy Boolean
- If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present.
- description String
- A user-friendly description of the dataset
- effectiveLabels Map<String>
- All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Pulumi, other clients and services.
- etag String
- A hash of the resource.
- externalCatalogDatasetOptions Property Map
- Options defining open source compatible datasets living in the BigQuery catalog. Contains metadata of open source database, schema or namespace represented by the current dataset. Structure is documented below.
- externalDatasetReference Property Map
- Information about the external metadata storage where the dataset is defined. Structure is documented below.
- friendlyName String
- A descriptive name for the dataset
- isCaseInsensitive Boolean
- TRUE if the dataset and its table names are case-insensitive, otherwise FALSE. By default, this is FALSE, which means the dataset and its table names are case-sensitive. This field does not affect routine references.
- labels Map<String>
- The labels associated with this dataset. You can use these to organize and group your datasets. Note: This field is non-authoritative, and will only manage the labels present in your configuration. Please refer to the field effective_labels for all of the labels present on the resource.
- lastModifiedTime Number
- The date when this dataset or any of its tables was last modified, in milliseconds since the epoch.
- location String
- The geographic location where the dataset should reside.
See official docs.
There are two types of locations, regional or multi-regional. A regional
location is a specific geographic place, such as Tokyo, and a multi-regional
location is a large geographic area, such as the United States, that
contains at least two geographic places.
The default value is multi-regional location US. Changing this forces a new resource to be created.
- maxTimeTravelHours String
- Defines the time travel window in hours. The value can be from 48 to 168 hours (2 to 7 days).
- project String
- The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- pulumiLabels Map<String>
- The combination of labels configured directly on the resource and default labels configured on the provider.
- resourceTags Map<String>
- The tags attached to this table. Tag keys are globally unique. Tag key is expected to be in the namespaced format, for example "123456789012/environment" where 123456789012 is the ID of the parent organization or project resource for this tag key. Tag value is expected to be the short name, for example "Production". See Tag definitions for more details.
- selfLink String
- The URI of the created resource.
- storageBillingModel String
- Specifies the storage billing model for the dataset. Set this flag value to LOGICAL to use logical bytes for storage billing, or to PHYSICAL to use physical bytes instead. LOGICAL is the default if this flag isn't specified.
Supporting Types
DatasetAccess, DatasetAccessArgs    
- Condition
DatasetAccess Condition 
- Condition for the binding. If CEL expression in this field is true, this access binding will be considered. Structure is documented below.
- Dataset
DatasetAccess Dataset 
- Grants all resources of particular types in a particular dataset read access to the current dataset. Structure is documented below.
- Domain string
- A domain to grant access to. Any users signed in with the domain specified will be granted the specified access
- GroupByEmail string
- An email address of a Google Group to grant access to.
- IamMember string
- Some other type of member that appears in the IAM Policy but isn't a user,
group, domain, or special group. For example: allUsers
- Role string
- Describes the rights granted to the user specified by the other member of the access object. Basic, predefined, and custom roles are supported. Predefined roles that have equivalent basic roles are swapped by the API to their basic counterparts. See official docs.
- Routine
DatasetAccess Routine 
- A routine from a different dataset to grant access to. Queries executed against that routine will have read access to tables in this dataset. The role field is not required when this field is set. If that routine is updated by any user, access to the routine needs to be granted again via an update operation. Structure is documented below.
- SpecialGroup string
- A special group to grant access to. Possible values include:- projectOwners: Owners of the enclosing project.
- projectReaders: Readers of the enclosing project.
- projectWriters: Writers of the enclosing project.
- allAuthenticatedUsers: All authenticated BigQuery users.
 
- UserByEmail string
- An email address of a user to grant access to. For example: fred@example.com
- View
DatasetAccess View 
- A view from a different dataset to grant access to. Queries executed against that view will have read access to tables in this dataset. The role field is not required when this field is set. If that view is updated by any user, access to the view needs to be granted again via an update operation. Structure is documented below.
- Condition
DatasetAccess Condition 
- Condition for the binding. If CEL expression in this field is true, this access binding will be considered. Structure is documented below.
- Dataset
DatasetAccess Dataset 
- Grants all resources of particular types in a particular dataset read access to the current dataset. Structure is documented below.
- Domain string
- A domain to grant access to. Any users signed in with the domain specified will be granted the specified access
- GroupByEmail string
- An email address of a Google Group to grant access to.
- IamMember string
- Some other type of member that appears in the IAM Policy but isn't a user,
group, domain, or special group. For example: allUsers
- Role string
- Describes the rights granted to the user specified by the other member of the access object. Basic, predefined, and custom roles are supported. Predefined roles that have equivalent basic roles are swapped by the API to their basic counterparts. See official docs.
- Routine
DatasetAccess Routine 
- A routine from a different dataset to grant access to. Queries executed against that routine will have read access to tables in this dataset. The role field is not required when this field is set. If that routine is updated by any user, access to the routine needs to be granted again via an update operation. Structure is documented below.
- SpecialGroup string
- A special group to grant access to. Possible values include:- projectOwners: Owners of the enclosing project.
- projectReaders: Readers of the enclosing project.
- projectWriters: Writers of the enclosing project.
- allAuthenticatedUsers: All authenticated BigQuery users.
 
- UserByEmail string
- An email address of a user to grant access to. For example: fred@example.com
- View
DatasetAccess View 
- A view from a different dataset to grant access to. Queries executed against that view will have read access to tables in this dataset. The role field is not required when this field is set. If that view is updated by any user, access to the view needs to be granted again via an update operation. Structure is documented below.
- condition
DatasetAccess Condition 
- Condition for the binding. If CEL expression in this field is true, this access binding will be considered. Structure is documented below.
- dataset
DatasetAccess Dataset 
- Grants all resources of particular types in a particular dataset read access to the current dataset. Structure is documented below.
- domain String
- A domain to grant access to. Any users signed in with the domain specified will be granted the specified access
- groupByEmail String
- An email address of a Google Group to grant access to.
- iamMember String
- Some other type of member that appears in the IAM Policy but isn't a user,
group, domain, or special group. For example: allUsers
- role String
- Describes the rights granted to the user specified by the other member of the access object. Basic, predefined, and custom roles are supported. Predefined roles that have equivalent basic roles are swapped by the API to their basic counterparts. See official docs.
- routine
DatasetAccess Routine 
- A routine from a different dataset to grant access to. Queries executed against that routine will have read access to tables in this dataset. The role field is not required when this field is set. If that routine is updated by any user, access to the routine needs to be granted again via an update operation. Structure is documented below.
- specialGroup String
- A special group to grant access to. Possible values include:- projectOwners: Owners of the enclosing project.
- projectReaders: Readers of the enclosing project.
- projectWriters: Writers of the enclosing project.
- allAuthenticatedUsers: All authenticated BigQuery users.
 
- userByEmail String
- An email address of a user to grant access to. For example: fred@example.com
- view
DatasetAccess View 
- A view from a different dataset to grant access to. Queries executed against that view will have read access to tables in this dataset. The role field is not required when this field is set. If that view is updated by any user, access to the view needs to be granted again via an update operation. Structure is documented below.
- condition
DatasetAccess Condition 
- Condition for the binding. If CEL expression in this field is true, this access binding will be considered. Structure is documented below.
- dataset
DatasetAccess Dataset 
- Grants all resources of particular types in a particular dataset read access to the current dataset. Structure is documented below.
- domain string
- A domain to grant access to. Any users signed in with the domain specified will be granted the specified access
- groupByEmail string
- An email address of a Google Group to grant access to.
- iamMember string
- Some other type of member that appears in the IAM Policy but isn't a user,
group, domain, or special group. For example: allUsers
- role string
- Describes the rights granted to the user specified by the other member of the access object. Basic, predefined, and custom roles are supported. Predefined roles that have equivalent basic roles are swapped by the API to their basic counterparts. See official docs.
- routine
DatasetAccess Routine 
- A routine from a different dataset to grant access to. Queries executed against that routine will have read access to tables in this dataset. The role field is not required when this field is set. If that routine is updated by any user, access to the routine needs to be granted again via an update operation. Structure is documented below.
- specialGroup string
- A special group to grant access to. Possible values include:- projectOwners: Owners of the enclosing project.
- projectReaders: Readers of the enclosing project.
- projectWriters: Writers of the enclosing project.
- allAuthenticatedUsers: All authenticated BigQuery users.
 
- userByEmail string
- An email address of a user to grant access to. For example: fred@example.com
- view
DatasetAccess View 
- A view from a different dataset to grant access to. Queries executed against that view will have read access to tables in this dataset. The role field is not required when this field is set. If that view is updated by any user, access to the view needs to be granted again via an update operation. Structure is documented below.
- condition
DatasetAccess Condition 
- Condition for the binding. If CEL expression in this field is true, this access binding will be considered. Structure is documented below.
- dataset
DatasetAccess Dataset 
- Grants all resources of particular types in a particular dataset read access to the current dataset. Structure is documented below.
- domain str
- A domain to grant access to. Any users signed in with the domain specified will be granted the specified access
- group_by_email str
- An email address of a Google Group to grant access to.
- iam_member str
- Some other type of member that appears in the IAM Policy but isn't a user,
group, domain, or special group. For example: allUsers
- role str
- Describes the rights granted to the user specified by the other member of the access object. Basic, predefined, and custom roles are supported. Predefined roles that have equivalent basic roles are swapped by the API to their basic counterparts. See official docs.
- routine
DatasetAccess Routine 
- A routine from a different dataset to grant access to. Queries executed against that routine will have read access to tables in this dataset. The role field is not required when this field is set. If that routine is updated by any user, access to the routine needs to be granted again via an update operation. Structure is documented below.
- special_group str
- A special group to grant access to. Possible values include:- projectOwners: Owners of the enclosing project.
- projectReaders: Readers of the enclosing project.
- projectWriters: Writers of the enclosing project.
- allAuthenticatedUsers: All authenticated BigQuery users.
 
- user_by_email str
- An email address of a user to grant access to. For example: fred@example.com
- view
DatasetAccess View 
- A view from a different dataset to grant access to. Queries executed against that view will have read access to tables in this dataset. The role field is not required when this field is set. If that view is updated by any user, access to the view needs to be granted again via an update operation. Structure is documented below.
- condition Property Map
- Condition for the binding. If CEL expression in this field is true, this access binding will be considered. Structure is documented below.
- dataset Property Map
- Grants all resources of particular types in a particular dataset read access to the current dataset. Structure is documented below.
- domain String
- A domain to grant access to. Any users signed in with the domain specified will be granted the specified access
- groupByEmail String
- An email address of a Google Group to grant access to.
- iamMember String
- Some other type of member that appears in the IAM Policy but isn't a user,
group, domain, or special group. For example: allUsers
- role String
- Describes the rights granted to the user specified by the other member of the access object. Basic, predefined, and custom roles are supported. Predefined roles that have equivalent basic roles are swapped by the API to their basic counterparts. See official docs.
- routine Property Map
- A routine from a different dataset to grant access to. Queries executed against that routine will have read access to tables in this dataset. The role field is not required when this field is set. If that routine is updated by any user, access to the routine needs to be granted again via an update operation. Structure is documented below.
- specialGroup String
- A special group to grant access to. Possible values include:- projectOwners: Owners of the enclosing project.
- projectReaders: Readers of the enclosing project.
- projectWriters: Writers of the enclosing project.
- allAuthenticatedUsers: All authenticated BigQuery users.
 
- userByEmail String
- An email address of a user to grant access to. For example: fred@example.com
- view Property Map
- A view from a different dataset to grant access to. Queries executed against that view will have read access to tables in this dataset. The role field is not required when this field is set. If that view is updated by any user, access to the view needs to be granted again via an update operation. Structure is documented below.
DatasetAccessCondition, DatasetAccessConditionArgs      
- Expression string
- Textual representation of an expression in Common Expression Language syntax.
- Description string
- Description of the expression. This is a longer text which describes the expression, e.g. when hovered over it in a UI.
- Location string
- String indicating the location of the expression for error reporting, e.g. a file name and a position in the file.
- Title string
- Title for the expression, i.e. a short string describing its purpose. This can be used e.g. in UIs which allow to enter the expression.
- Expression string
- Textual representation of an expression in Common Expression Language syntax.
- Description string
- Description of the expression. This is a longer text which describes the expression, e.g. when hovered over it in a UI.
- Location string
- String indicating the location of the expression for error reporting, e.g. a file name and a position in the file.
- Title string
- Title for the expression, i.e. a short string describing its purpose. This can be used e.g. in UIs which allow to enter the expression.
- expression String
- Textual representation of an expression in Common Expression Language syntax.
- description String
- Description of the expression. This is a longer text which describes the expression, e.g. when hovered over it in a UI.
- location String
- String indicating the location of the expression for error reporting, e.g. a file name and a position in the file.
- title String
- Title for the expression, i.e. a short string describing its purpose. This can be used e.g. in UIs which allow to enter the expression.
- expression string
- Textual representation of an expression in Common Expression Language syntax.
- description string
- Description of the expression. This is a longer text which describes the expression, e.g. when hovered over it in a UI.
- location string
- String indicating the location of the expression for error reporting, e.g. a file name and a position in the file.
- title string
- Title for the expression, i.e. a short string describing its purpose. This can be used e.g. in UIs which allow to enter the expression.
- expression str
- Textual representation of an expression in Common Expression Language syntax.
- description str
- Description of the expression. This is a longer text which describes the expression, e.g. when hovered over it in a UI.
- location str
- String indicating the location of the expression for error reporting, e.g. a file name and a position in the file.
- title str
- Title for the expression, i.e. a short string describing its purpose. This can be used e.g. in UIs which allow to enter the expression.
- expression String
- Textual representation of an expression in Common Expression Language syntax.
- description String
- Description of the expression. This is a longer text which describes the expression, e.g. when hovered over it in a UI.
- location String
- String indicating the location of the expression for error reporting, e.g. a file name and a position in the file.
- title String
- Title for the expression, i.e. a short string describing its purpose. This can be used e.g. in UIs which allow to enter the expression.
DatasetAccessDataset, DatasetAccessDatasetArgs      
- Dataset
DatasetAccess Dataset Dataset 
- The dataset this entry applies to Structure is documented below.
- TargetTypes List<string>
- Which resources in the dataset this entry applies to. Currently, only views are supported, but additional target types may be added in the future. Possible values: VIEWS
- Dataset
DatasetAccess Dataset Dataset 
- The dataset this entry applies to Structure is documented below.
- TargetTypes []string
- Which resources in the dataset this entry applies to. Currently, only views are supported, but additional target types may be added in the future. Possible values: VIEWS
- dataset
DatasetAccess Dataset Dataset 
- The dataset this entry applies to Structure is documented below.
- targetTypes List<String>
- Which resources in the dataset this entry applies to. Currently, only views are supported, but additional target types may be added in the future. Possible values: VIEWS
- dataset
DatasetAccess Dataset Dataset 
- The dataset this entry applies to Structure is documented below.
- targetTypes string[]
- Which resources in the dataset this entry applies to. Currently, only views are supported, but additional target types may be added in the future. Possible values: VIEWS
- dataset
DatasetAccess Dataset Dataset 
- The dataset this entry applies to Structure is documented below.
- target_types Sequence[str]
- Which resources in the dataset this entry applies to. Currently, only views are supported, but additional target types may be added in the future. Possible values: VIEWS
- dataset Property Map
- The dataset this entry applies to Structure is documented below.
- targetTypes List<String>
- Which resources in the dataset this entry applies to. Currently, only views are supported, but additional target types may be added in the future. Possible values: VIEWS
DatasetAccessDatasetDataset, DatasetAccessDatasetDatasetArgs        
- dataset_id str
- The ID of the dataset containing this table.
- project_id str
- The ID of the project containing this table.
DatasetAccessRoutine, DatasetAccessRoutineArgs      
- dataset_id str
- The ID of the dataset containing this table.
- project_id str
- The ID of the project containing this table.
- routine_id str
- The ID of the routine. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 256 characters.
DatasetAccessView, DatasetAccessViewArgs      
- dataset_id str
- The ID of the dataset containing this table.
- project_id str
- The ID of the project containing this table.
- table_id str
- The ID of the table. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.
DatasetDefaultEncryptionConfiguration, DatasetDefaultEncryptionConfigurationArgs        
- KmsKeyName string
- Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table. The BigQuery Service Account associated with your project requires access to this encryption key.
- KmsKeyName string
- Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table. The BigQuery Service Account associated with your project requires access to this encryption key.
- kmsKeyName String
- Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table. The BigQuery Service Account associated with your project requires access to this encryption key.
- kmsKeyName string
- Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table. The BigQuery Service Account associated with your project requires access to this encryption key.
- kms_key_name str
- Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table. The BigQuery Service Account associated with your project requires access to this encryption key.
- kmsKeyName String
- Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table. The BigQuery Service Account associated with your project requires access to this encryption key.
DatasetExternalCatalogDatasetOptions, DatasetExternalCatalogDatasetOptionsArgs          
- DefaultStorageLocationUri string
- The storage location URI for all tables in the dataset. Equivalent to hive metastore's database locationUri. Maximum length of 1024 characters.
- Parameters Dictionary<string, string>
- A map of key value pairs defining the parameters and properties of the open source schema. Maximum size of 2Mib.
- DefaultStorageLocationUri string
- The storage location URI for all tables in the dataset. Equivalent to hive metastore's database locationUri. Maximum length of 1024 characters.
- Parameters map[string]string
- A map of key value pairs defining the parameters and properties of the open source schema. Maximum size of 2Mib.
- defaultStorageLocationUri String
- The storage location URI for all tables in the dataset. Equivalent to hive metastore's database locationUri. Maximum length of 1024 characters.
- parameters Map<String,String>
- A map of key value pairs defining the parameters and properties of the open source schema. Maximum size of 2Mib.
- defaultStorageLocationUri string
- The storage location URI for all tables in the dataset. Equivalent to hive metastore's database locationUri. Maximum length of 1024 characters.
- parameters {[key: string]: string}
- A map of key value pairs defining the parameters and properties of the open source schema. Maximum size of 2Mib.
- default_storage_location_uri str
- The storage location URI for all tables in the dataset. Equivalent to hive metastore's database locationUri. Maximum length of 1024 characters.
- parameters Mapping[str, str]
- A map of key value pairs defining the parameters and properties of the open source schema. Maximum size of 2Mib.
- defaultStorageLocationUri String
- The storage location URI for all tables in the dataset. Equivalent to hive metastore's database locationUri. Maximum length of 1024 characters.
- parameters Map<String>
- A map of key value pairs defining the parameters and properties of the open source schema. Maximum size of 2Mib.
DatasetExternalDatasetReference, DatasetExternalDatasetReferenceArgs        
- Connection string
- The connection id that is used to access the externalSource. Format: projects/{projectId}/locations/{locationId}/connections/{connectionId}
- ExternalSource string
- External source that backs this dataset.
- Connection string
- The connection id that is used to access the externalSource. Format: projects/{projectId}/locations/{locationId}/connections/{connectionId}
- ExternalSource string
- External source that backs this dataset.
- connection String
- The connection id that is used to access the externalSource. Format: projects/{projectId}/locations/{locationId}/connections/{connectionId}
- externalSource String
- External source that backs this dataset.
- connection string
- The connection id that is used to access the externalSource. Format: projects/{projectId}/locations/{locationId}/connections/{connectionId}
- externalSource string
- External source that backs this dataset.
- connection str
- The connection id that is used to access the externalSource. Format: projects/{projectId}/locations/{locationId}/connections/{connectionId}
- external_source str
- External source that backs this dataset.
- connection String
- The connection id that is used to access the externalSource. Format: projects/{projectId}/locations/{locationId}/connections/{connectionId}
- externalSource String
- External source that backs this dataset.
Import
Dataset can be imported using any of these accepted formats:
- projects/{{project}}/datasets/{{dataset_id}}
- {{project}}/{{dataset_id}}
- {{dataset_id}}
When using the pulumi import command, Dataset can be imported using one of the formats above. For example:
$ pulumi import gcp:bigquery/dataset:Dataset default projects/{{project}}/datasets/{{dataset_id}}
$ pulumi import gcp:bigquery/dataset:Dataset default {{project}}/{{dataset_id}}
$ pulumi import gcp:bigquery/dataset:Dataset default {{dataset_id}}
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- Google Cloud (GCP) Classic pulumi/pulumi-gcp
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the google-beta Terraform Provider.