azure-native.synapse.BigDataPool
A Big Data pool. Azure REST API version: 2021-06-01. Prior API version in Azure Native 1.x: 2021-03-01.
Other available API versions: 2021-05-01, 2021-06-01-preview.
Example Usage
Create or update a Big Data pool
C#
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using AzureNative = Pulumi.AzureNative;
return await Deployment.RunAsync(() => 
{
    var bigDataPool = new AzureNative.Synapse.BigDataPool("bigDataPool", new()
    {
        AutoPause = new AzureNative.Synapse.Inputs.AutoPausePropertiesArgs
        {
            DelayInMinutes = 15,
            Enabled = true,
        },
        AutoScale = new AzureNative.Synapse.Inputs.AutoScalePropertiesArgs
        {
            Enabled = true,
            MaxNodeCount = 50,
            MinNodeCount = 3,
        },
        BigDataPoolName = "ExamplePool",
        DefaultSparkLogFolder = "/logs",
        IsAutotuneEnabled = false,
        LibraryRequirements = new AzureNative.Synapse.Inputs.LibraryRequirementsArgs
        {
            Content = "",
            Filename = "requirements.txt",
        },
        Location = "West US 2",
        NodeCount = 4,
        NodeSize = AzureNative.Synapse.NodeSize.Medium,
        NodeSizeFamily = AzureNative.Synapse.NodeSizeFamily.MemoryOptimized,
        ResourceGroupName = "ExampleResourceGroup",
        SparkEventsFolder = "/events",
        SparkVersion = "3.3",
        Tags = 
        {
            { "key", "value" },
        },
        WorkspaceName = "ExampleWorkspace",
    });
});
Go
package main
import (
	synapse "github.com/pulumi/pulumi-azure-native-sdk/synapse/v2"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := synapse.NewBigDataPool(ctx, "bigDataPool", &synapse.BigDataPoolArgs{
			AutoPause: &synapse.AutoPausePropertiesArgs{
				DelayInMinutes: pulumi.Int(15),
				Enabled:        pulumi.Bool(true),
			},
			AutoScale: &synapse.AutoScalePropertiesArgs{
				Enabled:      pulumi.Bool(true),
				MaxNodeCount: pulumi.Int(50),
				MinNodeCount: pulumi.Int(3),
			},
			BigDataPoolName:       pulumi.String("ExamplePool"),
			DefaultSparkLogFolder: pulumi.String("/logs"),
			IsAutotuneEnabled:     pulumi.Bool(false),
			LibraryRequirements: &synapse.LibraryRequirementsArgs{
				Content:  pulumi.String(""),
				Filename: pulumi.String("requirements.txt"),
			},
			Location:          pulumi.String("West US 2"),
			NodeCount:         pulumi.Int(4),
			NodeSize:          pulumi.String(synapse.NodeSizeMedium),
			NodeSizeFamily:    pulumi.String(synapse.NodeSizeFamilyMemoryOptimized),
			ResourceGroupName: pulumi.String("ExampleResourceGroup"),
			SparkEventsFolder: pulumi.String("/events"),
			SparkVersion:      pulumi.String("3.3"),
			Tags: pulumi.StringMap{
				"key": pulumi.String("value"),
			},
			WorkspaceName: pulumi.String("ExampleWorkspace"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
Java
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.azurenative.synapse.BigDataPool;
import com.pulumi.azurenative.synapse.BigDataPoolArgs;
import com.pulumi.azurenative.synapse.inputs.AutoPausePropertiesArgs;
import com.pulumi.azurenative.synapse.inputs.AutoScalePropertiesArgs;
import com.pulumi.azurenative.synapse.inputs.LibraryRequirementsArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var bigDataPool = new BigDataPool("bigDataPool", BigDataPoolArgs.builder()
            .autoPause(AutoPausePropertiesArgs.builder()
                .delayInMinutes(15)
                .enabled(true)
                .build())
            .autoScale(AutoScalePropertiesArgs.builder()
                .enabled(true)
                .maxNodeCount(50)
                .minNodeCount(3)
                .build())
            .bigDataPoolName("ExamplePool")
            .defaultSparkLogFolder("/logs")
            .isAutotuneEnabled(false)
            .libraryRequirements(LibraryRequirementsArgs.builder()
                .content("")
                .filename("requirements.txt")
                .build())
            .location("West US 2")
            .nodeCount(4)
            .nodeSize("Medium")
            .nodeSizeFamily("MemoryOptimized")
            .resourceGroupName("ExampleResourceGroup")
            .sparkEventsFolder("/events")
            .sparkVersion("3.3")
            .tags(Map.of("key", "value"))
            .workspaceName("ExampleWorkspace")
            .build());
    }
}
TypeScript
import * as pulumi from "@pulumi/pulumi";
import * as azure_native from "@pulumi/azure-native";
const bigDataPool = new azure_native.synapse.BigDataPool("bigDataPool", {
    autoPause: {
        delayInMinutes: 15,
        enabled: true,
    },
    autoScale: {
        enabled: true,
        maxNodeCount: 50,
        minNodeCount: 3,
    },
    bigDataPoolName: "ExamplePool",
    defaultSparkLogFolder: "/logs",
    isAutotuneEnabled: false,
    libraryRequirements: {
        content: "",
        filename: "requirements.txt",
    },
    location: "West US 2",
    nodeCount: 4,
    nodeSize: azure_native.synapse.NodeSize.Medium,
    nodeSizeFamily: azure_native.synapse.NodeSizeFamily.MemoryOptimized,
    resourceGroupName: "ExampleResourceGroup",
    sparkEventsFolder: "/events",
    sparkVersion: "3.3",
    tags: {
        key: "value",
    },
    workspaceName: "ExampleWorkspace",
});
Python
import pulumi
import pulumi_azure_native as azure_native
big_data_pool = azure_native.synapse.BigDataPool("bigDataPool",
    auto_pause={
        "delay_in_minutes": 15,
        "enabled": True,
    },
    auto_scale={
        "enabled": True,
        "max_node_count": 50,
        "min_node_count": 3,
    },
    big_data_pool_name="ExamplePool",
    default_spark_log_folder="/logs",
    is_autotune_enabled=False,
    library_requirements={
        "content": "",
        "filename": "requirements.txt",
    },
    location="West US 2",
    node_count=4,
    node_size=azure_native.synapse.NodeSize.MEDIUM,
    node_size_family=azure_native.synapse.NodeSizeFamily.MEMORY_OPTIMIZED,
    resource_group_name="ExampleResourceGroup",
    spark_events_folder="/events",
    spark_version="3.3",
    tags={
        "key": "value",
    },
    workspace_name="ExampleWorkspace")
YAML
resources:
  bigDataPool:
    type: azure-native:synapse:BigDataPool
    properties:
      autoPause:
        delayInMinutes: 15
        enabled: true
      autoScale:
        enabled: true
        maxNodeCount: 50
        minNodeCount: 3
      bigDataPoolName: ExamplePool
      defaultSparkLogFolder: /logs
      isAutotuneEnabled: false
      libraryRequirements:
        content: ""
        filename: requirements.txt
      location: West US 2
      nodeCount: 4
      nodeSize: Medium
      nodeSizeFamily: MemoryOptimized
      resourceGroupName: ExampleResourceGroup
      sparkEventsFolder: /events
      sparkVersion: '3.3'
      tags:
        key: value
      workspaceName: ExampleWorkspace
Create BigDataPool Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new BigDataPool(name: string, args: BigDataPoolArgs, opts?: CustomResourceOptions);
@overload
def BigDataPool(resource_name: str,
                args: BigDataPoolArgs,
                opts: Optional[ResourceOptions] = None)
@overload
def BigDataPool(resource_name: str,
                opts: Optional[ResourceOptions] = None,
                resource_group_name: Optional[str] = None,
                workspace_name: Optional[str] = None,
                library_requirements: Optional[LibraryRequirementsArgs] = None,
                node_size: Optional[Union[str, NodeSize]] = None,
                custom_libraries: Optional[Sequence[LibraryInfoArgs]] = None,
                default_spark_log_folder: Optional[str] = None,
                dynamic_executor_allocation: Optional[DynamicExecutorAllocationArgs] = None,
                force: Optional[bool] = None,
                is_autotune_enabled: Optional[bool] = None,
                is_compute_isolation_enabled: Optional[bool] = None,
                auto_pause: Optional[AutoPausePropertiesArgs] = None,
                location: Optional[str] = None,
                node_count: Optional[int] = None,
                cache_size: Optional[int] = None,
                node_size_family: Optional[Union[str, NodeSizeFamily]] = None,
                provisioning_state: Optional[str] = None,
                big_data_pool_name: Optional[str] = None,
                session_level_packages_enabled: Optional[bool] = None,
                spark_config_properties: Optional[SparkConfigPropertiesArgs] = None,
                spark_events_folder: Optional[str] = None,
                spark_version: Optional[str] = None,
                tags: Optional[Mapping[str, str]] = None,
                auto_scale: Optional[AutoScalePropertiesArgs] = None)
func NewBigDataPool(ctx *Context, name string, args BigDataPoolArgs, opts ...ResourceOption) (*BigDataPool, error)
public BigDataPool(string name, BigDataPoolArgs args, CustomResourceOptions? opts = null)
public BigDataPool(String name, BigDataPoolArgs args)
public BigDataPool(String name, BigDataPoolArgs args, CustomResourceOptions options)
type: azure-native:synapse:BigDataPool
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args BigDataPoolArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args BigDataPoolArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args BigDataPoolArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args BigDataPoolArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args BigDataPoolArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
C#
var bigDataPoolResource = new AzureNative.Synapse.BigDataPool("bigDataPoolResource", new()
{
    ResourceGroupName = "string",
    WorkspaceName = "string",
    LibraryRequirements = new AzureNative.Synapse.Inputs.LibraryRequirementsArgs
    {
        Content = "string",
        Filename = "string",
    },
    NodeSize = "string",
    CustomLibraries = new[]
    {
        new AzureNative.Synapse.Inputs.LibraryInfoArgs
        {
            ContainerName = "string",
            Name = "string",
            Path = "string",
            Type = "string",
        },
    },
    DefaultSparkLogFolder = "string",
    DynamicExecutorAllocation = new AzureNative.Synapse.Inputs.DynamicExecutorAllocationArgs
    {
        Enabled = false,
        MaxExecutors = 0,
        MinExecutors = 0,
    },
    Force = false,
    IsAutotuneEnabled = false,
    IsComputeIsolationEnabled = false,
    AutoPause = new AzureNative.Synapse.Inputs.AutoPausePropertiesArgs
    {
        DelayInMinutes = 0,
        Enabled = false,
    },
    Location = "string",
    NodeCount = 0,
    CacheSize = 0,
    NodeSizeFamily = "string",
    ProvisioningState = "string",
    BigDataPoolName = "string",
    SessionLevelPackagesEnabled = false,
    SparkConfigProperties = new AzureNative.Synapse.Inputs.SparkConfigPropertiesArgs
    {
        ConfigurationType = "string",
        Content = "string",
        Filename = "string",
    },
    SparkEventsFolder = "string",
    SparkVersion = "string",
    Tags = 
    {
        { "string", "string" },
    },
    AutoScale = new AzureNative.Synapse.Inputs.AutoScalePropertiesArgs
    {
        Enabled = false,
        MaxNodeCount = 0,
        MinNodeCount = 0,
    },
});
Go
example, err := synapse.NewBigDataPool(ctx, "bigDataPoolResource", &synapse.BigDataPoolArgs{
	ResourceGroupName: pulumi.String("string"),
	WorkspaceName:     pulumi.String("string"),
	LibraryRequirements: &synapse.LibraryRequirementsArgs{
		Content:  pulumi.String("string"),
		Filename: pulumi.String("string"),
	},
	NodeSize: pulumi.String("string"),
	CustomLibraries: synapse.LibraryInfoArray{
		&synapse.LibraryInfoArgs{
			ContainerName: pulumi.String("string"),
			Name:          pulumi.String("string"),
			Path:          pulumi.String("string"),
			Type:          pulumi.String("string"),
		},
	},
	DefaultSparkLogFolder: pulumi.String("string"),
	DynamicExecutorAllocation: &synapse.DynamicExecutorAllocationArgs{
		Enabled:      pulumi.Bool(false),
		MaxExecutors: pulumi.Int(0),
		MinExecutors: pulumi.Int(0),
	},
	Force:                     pulumi.Bool(false),
	IsAutotuneEnabled:         pulumi.Bool(false),
	IsComputeIsolationEnabled: pulumi.Bool(false),
	AutoPause: &synapse.AutoPausePropertiesArgs{
		DelayInMinutes: pulumi.Int(0),
		Enabled:        pulumi.Bool(false),
	},
	Location:                    pulumi.String("string"),
	NodeCount:                   pulumi.Int(0),
	CacheSize:                   pulumi.Int(0),
	NodeSizeFamily:              pulumi.String("string"),
	ProvisioningState:           pulumi.String("string"),
	BigDataPoolName:             pulumi.String("string"),
	SessionLevelPackagesEnabled: pulumi.Bool(false),
	SparkConfigProperties: &synapse.SparkConfigPropertiesArgs{
		ConfigurationType: pulumi.String("string"),
		Content:           pulumi.String("string"),
		Filename:          pulumi.String("string"),
	},
	SparkEventsFolder: pulumi.String("string"),
	SparkVersion:      pulumi.String("string"),
	Tags: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	AutoScale: &synapse.AutoScalePropertiesArgs{
		Enabled:      pulumi.Bool(false),
		MaxNodeCount: pulumi.Int(0),
		MinNodeCount: pulumi.Int(0),
	},
})
Java
var bigDataPoolResource = new BigDataPool("bigDataPoolResource", BigDataPoolArgs.builder()
    .resourceGroupName("string")
    .workspaceName("string")
    .libraryRequirements(LibraryRequirementsArgs.builder()
        .content("string")
        .filename("string")
        .build())
    .nodeSize("string")
    .customLibraries(LibraryInfoArgs.builder()
        .containerName("string")
        .name("string")
        .path("string")
        .type("string")
        .build())
    .defaultSparkLogFolder("string")
    .dynamicExecutorAllocation(DynamicExecutorAllocationArgs.builder()
        .enabled(false)
        .maxExecutors(0)
        .minExecutors(0)
        .build())
    .force(false)
    .isAutotuneEnabled(false)
    .isComputeIsolationEnabled(false)
    .autoPause(AutoPausePropertiesArgs.builder()
        .delayInMinutes(0)
        .enabled(false)
        .build())
    .location("string")
    .nodeCount(0)
    .cacheSize(0)
    .nodeSizeFamily("string")
    .provisioningState("string")
    .bigDataPoolName("string")
    .sessionLevelPackagesEnabled(false)
    .sparkConfigProperties(SparkConfigPropertiesArgs.builder()
        .configurationType("string")
        .content("string")
        .filename("string")
        .build())
    .sparkEventsFolder("string")
    .sparkVersion("string")
    .tags(Map.of("string", "string"))
    .autoScale(AutoScalePropertiesArgs.builder()
        .enabled(false)
        .maxNodeCount(0)
        .minNodeCount(0)
        .build())
    .build());
Python
big_data_pool_resource = azure_native.synapse.BigDataPool("bigDataPoolResource",
    resource_group_name="string",
    workspace_name="string",
    library_requirements={
        "content": "string",
        "filename": "string",
    },
    node_size="string",
    custom_libraries=[{
        "container_name": "string",
        "name": "string",
        "path": "string",
        "type": "string",
    }],
    default_spark_log_folder="string",
    dynamic_executor_allocation={
        "enabled": False,
        "max_executors": 0,
        "min_executors": 0,
    },
    force=False,
    is_autotune_enabled=False,
    is_compute_isolation_enabled=False,
    auto_pause={
        "delay_in_minutes": 0,
        "enabled": False,
    },
    location="string",
    node_count=0,
    cache_size=0,
    node_size_family="string",
    provisioning_state="string",
    big_data_pool_name="string",
    session_level_packages_enabled=False,
    spark_config_properties={
        "configuration_type": "string",
        "content": "string",
        "filename": "string",
    },
    spark_events_folder="string",
    spark_version="string",
    tags={
        "string": "string",
    },
    auto_scale={
        "enabled": False,
        "max_node_count": 0,
        "min_node_count": 0,
    })
TypeScript
const bigDataPoolResource = new azure_native.synapse.BigDataPool("bigDataPoolResource", {
    resourceGroupName: "string",
    workspaceName: "string",
    libraryRequirements: {
        content: "string",
        filename: "string",
    },
    nodeSize: "string",
    customLibraries: [{
        containerName: "string",
        name: "string",
        path: "string",
        type: "string",
    }],
    defaultSparkLogFolder: "string",
    dynamicExecutorAllocation: {
        enabled: false,
        maxExecutors: 0,
        minExecutors: 0,
    },
    force: false,
    isAutotuneEnabled: false,
    isComputeIsolationEnabled: false,
    autoPause: {
        delayInMinutes: 0,
        enabled: false,
    },
    location: "string",
    nodeCount: 0,
    cacheSize: 0,
    nodeSizeFamily: "string",
    provisioningState: "string",
    bigDataPoolName: "string",
    sessionLevelPackagesEnabled: false,
    sparkConfigProperties: {
        configurationType: "string",
        content: "string",
        filename: "string",
    },
    sparkEventsFolder: "string",
    sparkVersion: "string",
    tags: {
        string: "string",
    },
    autoScale: {
        enabled: false,
        maxNodeCount: 0,
        minNodeCount: 0,
    },
});
YAML
type: azure-native:synapse:BigDataPool
properties:
    autoPause:
        delayInMinutes: 0
        enabled: false
    autoScale:
        enabled: false
        maxNodeCount: 0
        minNodeCount: 0
    bigDataPoolName: string
    cacheSize: 0
    customLibraries:
        - containerName: string
          name: string
          path: string
          type: string
    defaultSparkLogFolder: string
    dynamicExecutorAllocation:
        enabled: false
        maxExecutors: 0
        minExecutors: 0
    force: false
    isAutotuneEnabled: false
    isComputeIsolationEnabled: false
    libraryRequirements:
        content: string
        filename: string
    location: string
    nodeCount: 0
    nodeSize: string
    nodeSizeFamily: string
    provisioningState: string
    resourceGroupName: string
    sessionLevelPackagesEnabled: false
    sparkConfigProperties:
        configurationType: string
        content: string
        filename: string
    sparkEventsFolder: string
    sparkVersion: string
    tags:
        string: string
    workspaceName: string
BigDataPool Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
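For example, here is a minimal sketch of both forms for the autoPause input; the resource names and property values are illustrative, not defaults:

import pulumi_azure_native as azure_native

# Argument-class form: typed and IDE-friendly.
pool_a = azure_native.synapse.BigDataPool("poolA",
    resource_group_name="ExampleResourceGroup",
    workspace_name="ExampleWorkspace",
    auto_pause=azure_native.synapse.AutoPausePropertiesArgs(
        delay_in_minutes=15,
        enabled=True,
    ))

# Dictionary-literal form: equivalent, using snake_case keys.
pool_b = azure_native.synapse.BigDataPool("poolB",
    resource_group_name="ExampleResourceGroup",
    workspace_name="ExampleWorkspace",
    auto_pause={
        "delay_in_minutes": 15,
        "enabled": True,
    })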
The BigDataPool resource accepts the following input properties:
- ResourceGroupName string
- The name of the resource group. The name is case insensitive.
- WorkspaceName string
- The name of the workspace.
- AutoPause Pulumi.AzureNative.Synapse.Inputs.AutoPauseProperties
- Auto-pausing properties
- AutoScale Pulumi.AzureNative.Synapse.Inputs.AutoScaleProperties
- Auto-scaling properties
- BigDataPoolName string
- Big Data pool name
- CacheSize int
- The cache size
- CustomLibraries List<Pulumi.AzureNative.Synapse.Inputs.LibraryInfo>
- List of custom libraries/packages associated with the spark pool.
- DefaultSparkLogFolder string
- The default folder where Spark logs will be written.
- DynamicExecutorAllocation Pulumi.AzureNative.Synapse.Inputs.DynamicExecutorAllocation
- Dynamic Executor Allocation
- Force bool
- Whether to stop any running jobs in the Big Data pool
- IsAutotuneEnabled bool
- Whether autotune is required or not.
- IsComputeIsolationEnabled bool
- Whether compute isolation is required or not.
- LibraryRequirements Pulumi.AzureNative.Synapse.Inputs.LibraryRequirements
- Library version requirements
- Location string
- The geo-location where the resource lives
- NodeCount int
- The number of nodes in the Big Data pool.
- NodeSize string | Pulumi.AzureNative.Synapse.NodeSize
- The level of compute power that each node in the Big Data pool has.
- NodeSizeFamily string | Pulumi.AzureNative.Synapse.NodeSizeFamily
- The kind of nodes that the Big Data pool provides.
- ProvisioningState string
- The state of the Big Data pool.
- SessionLevelPackagesEnabled bool
- Whether session-level packages are enabled.
- SparkConfigProperties Pulumi.AzureNative.Synapse.Inputs.SparkConfigProperties
- Spark configuration file to specify additional properties
- SparkEventsFolder string
- The Spark events folder
- SparkVersion string
- The Apache Spark version.
- Tags Dictionary<string, string>
- Resource tags.
- ResourceGroupName string
- The name of the resource group. The name is case insensitive.
- WorkspaceName string
- The name of the workspace.
- AutoPause AutoPausePropertiesArgs
- Auto-pausing properties
- AutoScale AutoScalePropertiesArgs
- Auto-scaling properties
- BigDataPoolName string
- Big Data pool name
- CacheSize int
- The cache size
- CustomLibraries []LibraryInfoArgs
- List of custom libraries/packages associated with the spark pool.
- DefaultSparkLogFolder string
- The default folder where Spark logs will be written.
- DynamicExecutorAllocation DynamicExecutorAllocationArgs
- Dynamic Executor Allocation
- Force bool
- Whether to stop any running jobs in the Big Data pool
- IsAutotuneEnabled bool
- Whether autotune is required or not.
- IsComputeIsolationEnabled bool
- Whether compute isolation is required or not.
- LibraryRequirements LibraryRequirementsArgs
- Library version requirements
- Location string
- The geo-location where the resource lives
- NodeCount int
- The number of nodes in the Big Data pool.
- NodeSize string | NodeSize
- The level of compute power that each node in the Big Data pool has.
- NodeSizeFamily string | NodeSizeFamily
- The kind of nodes that the Big Data pool provides.
- ProvisioningState string
- The state of the Big Data pool.
- SessionLevelPackagesEnabled bool
- Whether session-level packages are enabled.
- SparkConfigProperties SparkConfigPropertiesArgs
- Spark configuration file to specify additional properties
- SparkEventsFolder string
- The Spark events folder
- SparkVersion string
- The Apache Spark version.
- Tags map[string]string
- Resource tags.
- resourceGroupName String
- The name of the resource group. The name is case insensitive.
- workspaceName String
- The name of the workspace.
- autoPause AutoPauseProperties
- Auto-pausing properties
- autoScale AutoScaleProperties
- Auto-scaling properties
- bigDataPoolName String
- Big Data pool name
- cacheSize Integer
- The cache size
- customLibraries List<LibraryInfo>
- List of custom libraries/packages associated with the spark pool.
- defaultSparkLogFolder String
- The default folder where Spark logs will be written.
- dynamicExecutorAllocation DynamicExecutorAllocation
- Dynamic Executor Allocation
- force Boolean
- Whether to stop any running jobs in the Big Data pool
- isAutotuneEnabled Boolean
- Whether autotune is required or not.
- isComputeIsolationEnabled Boolean
- Whether compute isolation is required or not.
- libraryRequirements LibraryRequirements
- Library version requirements
- location String
- The geo-location where the resource lives
- nodeCount Integer
- The number of nodes in the Big Data pool.
- nodeSize String | NodeSize
- The level of compute power that each node in the Big Data pool has.
- nodeSizeFamily String | NodeSizeFamily
- The kind of nodes that the Big Data pool provides.
- provisioningState String
- The state of the Big Data pool.
- sessionLevelPackagesEnabled Boolean
- Whether session-level packages are enabled.
- sparkConfigProperties SparkConfigProperties
- Spark configuration file to specify additional properties
- sparkEventsFolder String
- The Spark events folder
- sparkVersion String
- The Apache Spark version.
- tags Map<String,String>
- Resource tags.
- resourceGroupName string
- The name of the resource group. The name is case insensitive.
- workspaceName string
- The name of the workspace.
- autoPause AutoPauseProperties
- Auto-pausing properties
- autoScale AutoScaleProperties
- Auto-scaling properties
- bigDataPoolName string
- Big Data pool name
- cacheSize number
- The cache size
- customLibraries LibraryInfo[]
- List of custom libraries/packages associated with the spark pool.
- defaultSparkLogFolder string
- The default folder where Spark logs will be written.
- dynamicExecutorAllocation DynamicExecutorAllocation
- Dynamic Executor Allocation
- force boolean
- Whether to stop any running jobs in the Big Data pool
- isAutotuneEnabled boolean
- Whether autotune is required or not.
- isComputeIsolationEnabled boolean
- Whether compute isolation is required or not.
- libraryRequirements LibraryRequirements
- Library version requirements
- location string
- The geo-location where the resource lives
- nodeCount number
- The number of nodes in the Big Data pool.
- nodeSize string | NodeSize
- The level of compute power that each node in the Big Data pool has.
- nodeSizeFamily string | NodeSizeFamily
- The kind of nodes that the Big Data pool provides.
- provisioningState string
- The state of the Big Data pool.
- sessionLevelPackagesEnabled boolean
- Whether session-level packages are enabled.
- sparkConfigProperties SparkConfigProperties
- Spark configuration file to specify additional properties
- sparkEventsFolder string
- The Spark events folder
- sparkVersion string
- The Apache Spark version.
- tags {[key: string]: string}
- Resource tags.
- resource_group_name str
- The name of the resource group. The name is case insensitive.
- workspace_name str
- The name of the workspace.
- auto_pause AutoPausePropertiesArgs
- Auto-pausing properties
- auto_scale AutoScalePropertiesArgs
- Auto-scaling properties
- big_data_pool_name str
- Big Data pool name
- cache_size int
- The cache size
- custom_libraries Sequence[LibraryInfoArgs]
- List of custom libraries/packages associated with the spark pool.
- default_spark_log_folder str
- The default folder where Spark logs will be written.
- dynamic_executor_allocation DynamicExecutorAllocationArgs
- Dynamic Executor Allocation
- force bool
- Whether to stop any running jobs in the Big Data pool
- is_autotune_enabled bool
- Whether autotune is required or not.
- is_compute_isolation_enabled bool
- Whether compute isolation is required or not.
- library_requirements LibraryRequirementsArgs
- Library version requirements
- location str
- The geo-location where the resource lives
- node_count int
- The number of nodes in the Big Data pool.
- node_size str | NodeSize
- The level of compute power that each node in the Big Data pool has.
- node_size_family str | NodeSizeFamily
- The kind of nodes that the Big Data pool provides.
- provisioning_state str
- The state of the Big Data pool.
- session_level_packages_enabled bool
- Whether session-level packages are enabled.
- spark_config_properties SparkConfigPropertiesArgs
- Spark configuration file to specify additional properties
- spark_events_folder str
- The Spark events folder
- spark_version str
- The Apache Spark version.
- tags Mapping[str, str]
- Resource tags.
- resourceGroupName String
- The name of the resource group. The name is case insensitive.
- workspaceName String
- The name of the workspace.
- autoPause Property Map
- Auto-pausing properties
- autoScale Property Map
- Auto-scaling properties
- bigDataPoolName String
- Big Data pool name
- cacheSize Number
- The cache size
- customLibraries List<Property Map>
- List of custom libraries/packages associated with the spark pool.
- defaultSparkLogFolder String
- The default folder where Spark logs will be written.
- dynamicExecutorAllocation Property Map
- Dynamic Executor Allocation
- force Boolean
- Whether to stop any running jobs in the Big Data pool
- isAutotuneEnabled Boolean
- Whether autotune is required or not.
- isComputeIsolationEnabled Boolean
- Whether compute isolation is required or not.
- libraryRequirements Property Map
- Library version requirements
- location String
- The geo-location where the resource lives
- nodeCount Number
- The number of nodes in the Big Data pool.
- nodeSize String | "None" | "Small" | "Medium" | "Large" | "XLarge" | "XXLarge" | "XXXLarge"
- The level of compute power that each node in the Big Data pool has.
- nodeSizeFamily String | "None" | "MemoryOptimized" | "HardwareAcceleratedFPGA" | "HardwareAcceleratedGPU"
- The kind of nodes that the Big Data pool provides.
- provisioningState String
- The state of the Big Data pool.
- sessionLevelPackagesEnabled Boolean
- Whether session-level packages are enabled.
- sparkConfigProperties Property Map
- Spark configuration file to specify additional properties
- sparkEventsFolder String
- The Spark events folder
- sparkVersion String
- The Apache Spark version.
- tags Map<String>
- Resource tags.
Outputs
All input properties are implicitly available as output properties. Additionally, the BigDataPool resource produces the following output properties:
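For example, a short Python sketch that exports two of these outputs; the stack output names here are illustrative:

import pulumi
import pulumi_azure_native as azure_native

pool = azure_native.synapse.BigDataPool("bigDataPool",
    resource_group_name="ExampleResourceGroup",
    workspace_name="ExampleWorkspace")

# id and creation_date are assigned by the provider once the pool exists.
pulumi.export("poolId", pool.id)
pulumi.export("poolCreated", pool.creation_date)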
- CreationDate string
- The time when the Big Data pool was created.
- Id string
- The provider-assigned unique ID for this managed resource.
- LastSucceededTimestamp string
- The time when the Big Data pool was updated successfully.
- Name string
- The name of the resource
- Type string
- The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
- CreationDate string
- The time when the Big Data pool was created.
- Id string
- The provider-assigned unique ID for this managed resource.
- LastSucceededTimestamp string
- The time when the Big Data pool was updated successfully.
- Name string
- The name of the resource
- Type string
- The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
- creationDate String
- The time when the Big Data pool was created.
- id String
- The provider-assigned unique ID for this managed resource.
- lastSucceededTimestamp String
- The time when the Big Data pool was updated successfully.
- name String
- The name of the resource
- type String
- The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
- creationDate string
- The time when the Big Data pool was created.
- id string
- The provider-assigned unique ID for this managed resource.
- lastSucceededTimestamp string
- The time when the Big Data pool was updated successfully.
- name string
- The name of the resource
- type string
- The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
- creation_date str
- The time when the Big Data pool was created.
- id str
- The provider-assigned unique ID for this managed resource.
- last_succeeded_timestamp str
- The time when the Big Data pool was updated successfully.
- name str
- The name of the resource
- type str
- The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
- creationDate String
- The time when the Big Data pool was created.
- id String
- The provider-assigned unique ID for this managed resource.
- lastSucceededTimestamp String
- The time when the Big Data pool was updated successfully.
- name String
- The name of the resource
- type String
- The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
Supporting Types
AutoPauseProperties, AutoPausePropertiesArgs
- DelayInMinutes int
- Number of minutes of idle time before the Big Data pool is automatically paused.
- Enabled bool
- Whether auto-pausing is enabled for the Big Data pool.
- DelayInMinutes int
- Number of minutes of idle time before the Big Data pool is automatically paused.
- Enabled bool
- Whether auto-pausing is enabled for the Big Data pool.
- delayInMinutes Integer
- Number of minutes of idle time before the Big Data pool is automatically paused.
- enabled Boolean
- Whether auto-pausing is enabled for the Big Data pool.
- delayInMinutes number
- Number of minutes of idle time before the Big Data pool is automatically paused.
- enabled boolean
- Whether auto-pausing is enabled for the Big Data pool.
- delay_in_minutes int
- Number of minutes of idle time before the Big Data pool is automatically paused.
- enabled bool
- Whether auto-pausing is enabled for the Big Data pool.
- delayInMinutes Number
- Number of minutes of idle time before the Big Data pool is automatically paused.
- enabled Boolean
- Whether auto-pausing is enabled for the Big Data pool.
AutoPausePropertiesResponse, AutoPausePropertiesResponseArgs
- DelayInMinutes int
- Number of minutes of idle time before the Big Data pool is automatically paused.
- Enabled bool
- Whether auto-pausing is enabled for the Big Data pool.
- DelayInMinutes int
- Number of minutes of idle time before the Big Data pool is automatically paused.
- Enabled bool
- Whether auto-pausing is enabled for the Big Data pool.
- delayInMinutes Integer
- Number of minutes of idle time before the Big Data pool is automatically paused.
- enabled Boolean
- Whether auto-pausing is enabled for the Big Data pool.
- delayInMinutes number
- Number of minutes of idle time before the Big Data pool is automatically paused.
- enabled boolean
- Whether auto-pausing is enabled for the Big Data pool.
- delay_in_minutes int
- Number of minutes of idle time before the Big Data pool is automatically paused.
- enabled bool
- Whether auto-pausing is enabled for the Big Data pool.
- delayInMinutes Number
- Number of minutes of idle time before the Big Data pool is automatically paused.
- enabled Boolean
- Whether auto-pausing is enabled for the Big Data pool.
AutoScaleProperties, AutoScalePropertiesArgs
- Enabled bool
- Whether automatic scaling is enabled for the Big Data pool.
- MaxNodeCount int
- The maximum number of nodes the Big Data pool can support.
- MinNodeCount int
- The minimum number of nodes the Big Data pool can support.
- Enabled bool
- Whether automatic scaling is enabled for the Big Data pool.
- MaxNodeCount int
- The maximum number of nodes the Big Data pool can support.
- MinNodeCount int
- The minimum number of nodes the Big Data pool can support.
- enabled Boolean
- Whether automatic scaling is enabled for the Big Data pool.
- maxNodeCount Integer
- The maximum number of nodes the Big Data pool can support.
- minNodeCount Integer
- The minimum number of nodes the Big Data pool can support.
- enabled boolean
- Whether automatic scaling is enabled for the Big Data pool.
- maxNodeCount number
- The maximum number of nodes the Big Data pool can support.
- minNodeCount number
- The minimum number of nodes the Big Data pool can support.
- enabled bool
- Whether automatic scaling is enabled for the Big Data pool.
- max_node_count int
- The maximum number of nodes the Big Data pool can support.
- min_node_count int
- The minimum number of nodes the Big Data pool can support.
- enabled Boolean
- Whether automatic scaling is enabled for the Big Data pool.
- maxNodeCount Number
- The maximum number of nodes the Big Data pool can support.
- minNodeCount Number
- The minimum number of nodes the Big Data pool can support.
AutoScalePropertiesResponse, AutoScalePropertiesResponseArgs
- Enabled bool
- Whether automatic scaling is enabled for the Big Data pool.
- MaxNodeCount int
- The maximum number of nodes the Big Data pool can support.
- MinNodeCount int
- The minimum number of nodes the Big Data pool can support.
- Enabled bool
- Whether automatic scaling is enabled for the Big Data pool.
- MaxNodeCount int
- The maximum number of nodes the Big Data pool can support.
- MinNodeCount int
- The minimum number of nodes the Big Data pool can support.
- enabled Boolean
- Whether automatic scaling is enabled for the Big Data pool.
- maxNodeCount Integer
- The maximum number of nodes the Big Data pool can support.
- minNodeCount Integer
- The minimum number of nodes the Big Data pool can support.
- enabled boolean
- Whether automatic scaling is enabled for the Big Data pool.
- maxNodeCount number
- The maximum number of nodes the Big Data pool can support.
- minNodeCount number
- The minimum number of nodes the Big Data pool can support.
- enabled bool
- Whether automatic scaling is enabled for the Big Data pool.
- max_node_count int
- The maximum number of nodes the Big Data pool can support.
- min_node_count int
- The minimum number of nodes the Big Data pool can support.
- enabled Boolean
- Whether automatic scaling is enabled for the Big Data pool.
- maxNodeCount Number
- The maximum number of nodes the Big Data pool can support.
- minNodeCount Number
- The minimum number of nodes the Big Data pool can support.
ConfigurationType, ConfigurationTypeArgs    
- File
- File
- Artifact
- Artifact
- ConfigurationTypeFile
- File
- ConfigurationTypeArtifact
- Artifact
- File
- File
- Artifact
- Artifact
- File
- File
- Artifact
- Artifact
- FILE
- File
- ARTIFACT
- Artifact
- "File"
- File
- "Artifact"
- Artifact
DynamicExecutorAllocation, DynamicExecutorAllocationArgs
- Enabled bool
- Indicates whether Dynamic Executor Allocation is enabled or not.
- MaxExecutors int
- The maximum number of executors allotted
- MinExecutors int
- The minimum number of executors allotted
- Enabled bool
- Indicates whether Dynamic Executor Allocation is enabled or not.
- MaxExecutors int
- The maximum number of executors allotted
- MinExecutors int
- The minimum number of executors allotted
- enabled Boolean
- Indicates whether Dynamic Executor Allocation is enabled or not.
- maxExecutors Integer
- The maximum number of executors allotted
- minExecutors Integer
- The minimum number of executors allotted
- enabled boolean
- Indicates whether Dynamic Executor Allocation is enabled or not.
- maxExecutors number
- The maximum number of executors allotted
- minExecutors number
- The minimum number of executors allotted
- enabled bool
- Indicates whether Dynamic Executor Allocation is enabled or not.
- max_executors int
- The maximum number of executors allotted
- min_executors int
- The minimum number of executors allotted
- enabled Boolean
- Indicates whether Dynamic Executor Allocation is enabled or not.
- maxExecutors Number
- The maximum number of executors allotted
- minExecutors Number
- The minimum number of executors allotted
DynamicExecutorAllocationResponse, DynamicExecutorAllocationResponseArgs
- Enabled bool
- Indicates whether Dynamic Executor Allocation is enabled or not.
- MaxExecutors int
- The maximum number of executors allotted
- MinExecutors int
- The minimum number of executors allotted
- Enabled bool
- Indicates whether Dynamic Executor Allocation is enabled or not.
- MaxExecutors int
- The maximum number of executors allotted
- MinExecutors int
- The minimum number of executors allotted
- enabled Boolean
- Indicates whether Dynamic Executor Allocation is enabled or not.
- maxExecutors Integer
- The maximum number of executors allotted
- minExecutors Integer
- The minimum number of executors allotted
- enabled boolean
- Indicates whether Dynamic Executor Allocation is enabled or not.
- maxExecutors number
- The maximum number of executors allotted
- minExecutors number
- The minimum number of executors allotted
- enabled bool
- Indicates whether Dynamic Executor Allocation is enabled or not.
- max_executors int
- The maximum number of executors allotted
- min_executors int
- The minimum number of executors allotted
- enabled Boolean
- Indicates whether Dynamic Executor Allocation is enabled or not.
- maxExecutors Number
- The maximum number of executors allotted
- minExecutors Number
- The minimum number of executors allotted
LibraryInfo, LibraryInfoArgs    
- ContainerName string
- Storage blob container name.
- Name string
- Name of the library.
- Path string
- Storage blob path of library.
- Type string
- Type of the library.
- ContainerName string
- Storage blob container name.
- Name string
- Name of the library.
- Path string
- Storage blob path of library.
- Type string
- Type of the library.
- containerName String
- Storage blob container name.
- name String
- Name of the library.
- path String
- Storage blob path of library.
- type String
- Type of the library.
- containerName string
- Storage blob container name.
- name string
- Name of the library.
- path string
- Storage blob path of library.
- type string
- Type of the library.
- container_name str
- Storage blob container name.
- name str
- Name of the library.
- path str
- Storage blob path of library.
- type str
- Type of the library.
- containerName String
- Storage blob container name.
- name String
- Name of the library.
- path String
- Storage blob path of library.
- type String
- Type of the library.
LibraryInfoResponse, LibraryInfoResponseArgs      
- CreatorId string
- Creator Id of the library/package.
- ProvisioningStatus string
- Provisioning status of the library/package.
- UploadedTimestamp string
- The last update time of the library.
- ContainerName string
- Storage blob container name.
- Name string
- Name of the library.
- Path string
- Storage blob path of library.
- Type string
- Type of the library.
- CreatorId string
- Creator Id of the library/package.
- ProvisioningStatus string
- Provisioning status of the library/package.
- UploadedTimestamp string
- The last update time of the library.
- ContainerName string
- Storage blob container name.
- Name string
- Name of the library.
- Path string
- Storage blob path of library.
- Type string
- Type of the library.
- creatorId String
- Creator Id of the library/package.
- provisioningStatus String
- Provisioning status of the library/package.
- uploadedTimestamp String
- The last update time of the library.
- containerName String
- Storage blob container name.
- name String
- Name of the library.
- path String
- Storage blob path of library.
- type String
- Type of the library.
- creatorId string
- Creator Id of the library/package.
- provisioningStatus string
- Provisioning status of the library/package.
- uploadedTimestamp string
- The last update time of the library.
- containerName string
- Storage blob container name.
- name string
- Name of the library.
- path string
- Storage blob path of library.
- type string
- Type of the library.
- creator_id str
- Creator Id of the library/package.
- provisioning_status str
- Provisioning status of the library/package.
- uploaded_timestamp str
- The last update time of the library.
- container_name str
- Storage blob container name.
- name str
- Name of the library.
- path str
- Storage blob path of library.
- type str
- Type of the library.
- creatorId String
- Creator Id of the library/package.
- provisioningStatus String
- Provisioning status of the library/package.
- uploadedTimestamp String
- The last update time of the library.
- containerName String
- Storage blob container name.
- name String
- Name of the library.
- path String
- Storage blob path of library.
- type String
- Type of the library.
LibraryRequirements, LibraryRequirementsArgs
- Content string
- The library requirements.
- Filename string
- The filename of the library requirements file.
- Content string
- The library requirements.
- Filename string
- The filename of the library requirements file.
- content String
- The library requirements.
- filename String
- The filename of the library requirements file.
- content string
- The library requirements.
- filename string
- The filename of the library requirements file.
- content str
- The library requirements.
- filename str
- The filename of the library requirements file.
- content String
- The library requirements.
- filename String
- The filename of the library requirements file.
LibraryRequirementsResponse, LibraryRequirementsResponseArgs
- Time string
- The last update time of the library requirements file.
- Content string
- The library requirements.
- Filename string
- The filename of the library requirements file.
- Time string
- The last update time of the library requirements file.
- Content string
- The library requirements.
- Filename string
- The filename of the library requirements file.
- time String
- The last update time of the library requirements file.
- content String
- The library requirements.
- filename String
- The filename of the library requirements file.
- time string
- The last update time of the library requirements file.
- content string
- The library requirements.
- filename string
- The filename of the library requirements file.
- time str
- The last update time of the library requirements file.
- content str
- The library requirements.
- filename str
- The filename of the library requirements file.
- time String
- The last update time of the library requirements file.
- content String
- The library requirements.
- filename String
- The filename of the library requirements file.
NodeSize, NodeSizeArgs    
- None
- None
- Small
- Small
- Medium
- Medium
- Large
- Large
- XLarge
- XLarge
- XXLarge
- XXLarge
- XXXLarge
- XXXLarge
- NodeSizeNone
- None
- NodeSizeSmall
- Small
- NodeSizeMedium
- Medium
- NodeSizeLarge
- Large
- NodeSizeXLarge
- XLarge
- NodeSizeXXLarge
- XXLarge
- NodeSizeXXXLarge
- XXXLarge
- None
- None
- Small
- Small
- Medium
- Medium
- Large
- Large
- XLarge
- XLarge
- XXLarge
- XXLarge
- XXXLarge
- XXXLarge
- None
- None
- Small
- Small
- Medium
- Medium
- Large
- Large
- XLarge
- XLarge
- XXLarge
- XXLarge
- XXXLarge
- XXXLarge
- NONE
- None
- SMALL
- Small
- MEDIUM
- Medium
- LARGE
- Large
- X_LARGE
- XLarge
- XX_LARGE
- XXLarge
- XXX_LARGE
- XXXLarge
- "None"
- None
- "Small"
- Small
- "Medium"
- Medium
- "Large"
- Large
- "XLarge"
- XLarge
- "XXLarge"
- XXLarge
- "XXXLarge"
- XXXLarge
NodeSizeFamily, NodeSizeFamilyArgs
- None
- None
- MemoryOptimized
- MemoryOptimized
- HardwareAcceleratedFPGA
- HardwareAcceleratedFPGA
- HardwareAcceleratedGPU
- HardwareAcceleratedGPU
- NodeSizeFamilyNone
- None
- NodeSizeFamilyMemoryOptimized
- MemoryOptimized
- NodeSizeFamilyHardwareAcceleratedFPGA
- HardwareAcceleratedFPGA
- NodeSizeFamilyHardwareAcceleratedGPU
- HardwareAcceleratedGPU
- None
- None
- MemoryOptimized
- MemoryOptimized
- HardwareAcceleratedFPGA
- HardwareAcceleratedFPGA
- HardwareAcceleratedGPU
- HardwareAcceleratedGPU
- None
- None
- MemoryOptimized
- MemoryOptimized
- HardwareAcceleratedFPGA
- HardwareAcceleratedFPGA
- HardwareAcceleratedGPU
- HardwareAcceleratedGPU
- NONE
- None
- MEMORY_OPTIMIZED
- MemoryOptimized
- HARDWARE_ACCELERATED_FPGA
- HardwareAcceleratedFPGA
- HARDWARE_ACCELERATED_GPU
- HardwareAcceleratedGPU
- "None"
- None
- "MemoryOptimized"
- MemoryOptimized
- "HardwareAcceleratedFPGA"
- HardwareAcceleratedFPGA
- "HardwareAcceleratedGPU"
- HardwareAcceleratedGPU
SparkConfigProperties, SparkConfigPropertiesArgs      
- ConfigurationType string | Pulumi.AzureNative.Synapse.ConfigurationType
- The type of the spark config properties file.
- Content string
- The spark config properties.
- Filename string
- The filename of the spark config properties file.
- ConfigurationType string | ConfigurationType 
- The type of the spark config properties file.
- Content string
- The spark config properties.
- Filename string
- The filename of the spark config properties file.
- configurationType String | ConfigurationType 
- The type of the spark config properties file.
- content String
- The spark config properties.
- filename String
- The filename of the spark config properties file.
- configurationType string | ConfigurationType 
- The type of the spark config properties file.
- content string
- The spark config properties.
- filename string
- The filename of the spark config properties file.
- configuration_type str | ConfigurationType 
- The type of the spark config properties file.
- content str
- The spark config properties.
- filename str
- The filename of the spark config properties file.
- configurationType String | "File" | "Artifact"
- The type of the spark config properties file.
- content String
- The spark config properties.
- filename String
- The filename of the spark config properties file.
SparkConfigPropertiesResponse, SparkConfigPropertiesResponseArgs        
- Time string
- The last update time of the spark config properties file.
- ConfigurationType string
- The type of the spark config properties file.
- Content string
- The spark config properties.
- Filename string
- The filename of the spark config properties file.
- Time string
- The last update time of the spark config properties file.
- ConfigurationType string
- The type of the spark config properties file.
- Content string
- The spark config properties.
- Filename string
- The filename of the spark config properties file.
- time String
- The last update time of the spark config properties file.
- configurationType String
- The type of the spark config properties file.
- content String
- The spark config properties.
- filename String
- The filename of the spark config properties file.
- time string
- The last update time of the spark config properties file.
- configurationType string
- The type of the spark config properties file.
- content string
- The spark config properties.
- filename string
- The filename of the spark config properties file.
- time str
- The last update time of the spark config properties file.
- configuration_type str
- The type of the spark config properties file.
- content str
- The spark config properties.
- filename str
- The filename of the spark config properties file.
- time String
- The last update time of the spark config properties file.
- configurationType String
- The type of the spark config properties file.
- content String
- The spark config properties.
- filename String
- The filename of the spark config properties file.
Import
An existing resource can be imported using its type token, name, and identifier, e.g.
$ pulumi import azure-native:synapse:BigDataPool ExamplePool /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/bigDataPools/{bigDataPoolName} 
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- Azure Native pulumi/pulumi-azure-native
- License
- Apache-2.0