We recommend using Azure Native.
azure.datafactory.IntegrationRuntimeRule
Manages a Data Factory Azure Integration Runtime.
Example Usage
import * as pulumi from "@pulumi/pulumi";
import * as azure from "@pulumi/azure";
const example = new azure.core.ResourceGroup("example", {
    name: "example-resources",
    location: "West Europe",
});
const exampleFactory = new azure.datafactory.Factory("example", {
    name: "example",
    location: example.location,
    resourceGroupName: example.name,
});
const exampleIntegrationRuntimeRule = new azure.datafactory.IntegrationRuntimeRule("example", {
    name: "example",
    dataFactoryId: exampleFactory.id,
    location: example.location,
});
import pulumi
import pulumi_azure as azure
example = azure.core.ResourceGroup("example",
    name="example-resources",
    location="West Europe")
example_factory = azure.datafactory.Factory("example",
    name="example",
    location=example.location,
    resource_group_name=example.name)
example_integration_runtime_rule = azure.datafactory.IntegrationRuntimeRule("example",
    name="example",
    data_factory_id=example_factory.id,
    location=example.location)
package main
import (
	"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/core"
	"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/datafactory"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		example, err := core.NewResourceGroup(ctx, "example", &core.ResourceGroupArgs{
			Name:     pulumi.String("example-resources"),
			Location: pulumi.String("West Europe"),
		})
		if err != nil {
			return err
		}
		exampleFactory, err := datafactory.NewFactory(ctx, "example", &datafactory.FactoryArgs{
			Name:              pulumi.String("example"),
			Location:          example.Location,
			ResourceGroupName: example.Name,
		})
		if err != nil {
			return err
		}
		_, err = datafactory.NewIntegrationRuntimeRule(ctx, "example", &datafactory.IntegrationRuntimeRuleArgs{
			Name:          pulumi.String("example"),
			DataFactoryId: exampleFactory.ID(),
			Location:      example.Location,
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Azure = Pulumi.Azure;
return await Deployment.RunAsync(() => 
{
    var example = new Azure.Core.ResourceGroup("example", new()
    {
        Name = "example-resources",
        Location = "West Europe",
    });
    var exampleFactory = new Azure.DataFactory.Factory("example", new()
    {
        Name = "example",
        Location = example.Location,
        ResourceGroupName = example.Name,
    });
    var exampleIntegrationRuntimeRule = new Azure.DataFactory.IntegrationRuntimeRule("example", new()
    {
        Name = "example",
        DataFactoryId = exampleFactory.Id,
        Location = example.Location,
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.azure.core.ResourceGroup;
import com.pulumi.azure.core.ResourceGroupArgs;
import com.pulumi.azure.datafactory.Factory;
import com.pulumi.azure.datafactory.FactoryArgs;
import com.pulumi.azure.datafactory.IntegrationRuntimeRule;
import com.pulumi.azure.datafactory.IntegrationRuntimeRuleArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var example = new ResourceGroup("example", ResourceGroupArgs.builder()
            .name("example-resources")
            .location("West Europe")
            .build());
        var exampleFactory = new Factory("exampleFactory", FactoryArgs.builder()
            .name("example")
            .location(example.location())
            .resourceGroupName(example.name())
            .build());
        var exampleIntegrationRuntimeRule = new IntegrationRuntimeRule("exampleIntegrationRuntimeRule", IntegrationRuntimeRuleArgs.builder()
            .name("example")
            .dataFactoryId(exampleFactory.id())
            .location(example.location())
            .build());
    }
}
resources:
  example:
    type: azure:core:ResourceGroup
    properties:
      name: example-resources
      location: West Europe
  exampleFactory:
    type: azure:datafactory:Factory
    name: example
    properties:
      name: example
      location: ${example.location}
      resourceGroupName: ${example.name}
  exampleIntegrationRuntimeRule:
    type: azure:datafactory:IntegrationRuntimeRule
    name: example
    properties:
      name: example
      dataFactoryId: ${exampleFactory.id}
      location: ${example.location}
Create IntegrationRuntimeRule Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new IntegrationRuntimeRule(name: string, args: IntegrationRuntimeRuleArgs, opts?: CustomResourceOptions);
@overload
def IntegrationRuntimeRule(resource_name: str,
                           args: IntegrationRuntimeRuleArgs,
                           opts: Optional[ResourceOptions] = None)
@overload
def IntegrationRuntimeRule(resource_name: str,
                           opts: Optional[ResourceOptions] = None,
                           data_factory_id: Optional[str] = None,
                           cleanup_enabled: Optional[bool] = None,
                           compute_type: Optional[str] = None,
                           core_count: Optional[int] = None,
                           description: Optional[str] = None,
                           location: Optional[str] = None,
                           name: Optional[str] = None,
                           time_to_live_min: Optional[int] = None,
                           virtual_network_enabled: Optional[bool] = None)
func NewIntegrationRuntimeRule(ctx *Context, name string, args IntegrationRuntimeRuleArgs, opts ...ResourceOption) (*IntegrationRuntimeRule, error)
public IntegrationRuntimeRule(string name, IntegrationRuntimeRuleArgs args, CustomResourceOptions? opts = null)
public IntegrationRuntimeRule(String name, IntegrationRuntimeRuleArgs args)
public IntegrationRuntimeRule(String name, IntegrationRuntimeRuleArgs args, CustomResourceOptions options)
type: azure:datafactory:IntegrationRuntimeRule
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args IntegrationRuntimeRuleArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args IntegrationRuntimeRuleArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args IntegrationRuntimeRuleArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args IntegrationRuntimeRuleArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args IntegrationRuntimeRuleArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var integrationRuntimeRuleResource = new Azure.DataFactory.IntegrationRuntimeRule("integrationRuntimeRuleResource", new()
{
    DataFactoryId = "string",
    CleanupEnabled = false,
    ComputeType = "string",
    CoreCount = 0,
    Description = "string",
    Location = "string",
    Name = "string",
    TimeToLiveMin = 0,
    VirtualNetworkEnabled = false,
});
example, err := datafactory.NewIntegrationRuntimeRule(ctx, "integrationRuntimeRuleResource", &datafactory.IntegrationRuntimeRuleArgs{
	DataFactoryId:         pulumi.String("string"),
	CleanupEnabled:        pulumi.Bool(false),
	ComputeType:           pulumi.String("string"),
	CoreCount:             pulumi.Int(0),
	Description:           pulumi.String("string"),
	Location:              pulumi.String("string"),
	Name:                  pulumi.String("string"),
	TimeToLiveMin:         pulumi.Int(0),
	VirtualNetworkEnabled: pulumi.Bool(false),
})
var integrationRuntimeRuleResource = new IntegrationRuntimeRule("integrationRuntimeRuleResource", IntegrationRuntimeRuleArgs.builder()
    .dataFactoryId("string")
    .cleanupEnabled(false)
    .computeType("string")
    .coreCount(0)
    .description("string")
    .location("string")
    .name("string")
    .timeToLiveMin(0)
    .virtualNetworkEnabled(false)
    .build());
integration_runtime_rule_resource = azure.datafactory.IntegrationRuntimeRule("integrationRuntimeRuleResource",
    data_factory_id="string",
    cleanup_enabled=False,
    compute_type="string",
    core_count=0,
    description="string",
    location="string",
    name="string",
    time_to_live_min=0,
    virtual_network_enabled=False)
const integrationRuntimeRuleResource = new azure.datafactory.IntegrationRuntimeRule("integrationRuntimeRuleResource", {
    dataFactoryId: "string",
    cleanupEnabled: false,
    computeType: "string",
    coreCount: 0,
    description: "string",
    location: "string",
    name: "string",
    timeToLiveMin: 0,
    virtualNetworkEnabled: false,
});
type: azure:datafactory:IntegrationRuntimeRule
properties:
    cleanupEnabled: false
    computeType: string
    coreCount: 0
    dataFactoryId: string
    description: string
    location: string
    name: string
    timeToLiveMin: 0
    virtualNetworkEnabled: false
IntegrationRuntimeRule Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The IntegrationRuntimeRule resource accepts the following input properties:
- DataFactoryId string
- The ID of the Data Factory this Integration Runtime is associated with. Changing this forces a new resource to be created.
- CleanupEnabled bool
- If set to false, the cluster will not be recycled; it will be reused for the next data flow activity run until its TTL (time to live) is reached. Defaults to true.
- ComputeType string
- Compute type of the cluster which will execute the data flow job. Valid values are General, ComputeOptimized and MemoryOptimized. Defaults to General.
- CoreCount int
- Core count of the cluster which will execute the data flow job. Valid values are 8, 16, 32, 48, 80, 144 and 272. Defaults to 8.
- Description string
- Integration runtime description.
- Location string
- Specifies the supported Azure location where the resource exists. Use AutoResolve to create an auto-resolve integration runtime. Changing this forces a new resource to be created.
- Name string
- Specifies the name of the Managed Integration Runtime. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- TimeToLiveMin int
- Time to live (in minutes) setting of the cluster which will execute the data flow job. Defaults to 0.
- VirtualNetworkEnabled bool
- Is the Integration Runtime compute provisioned within a Managed Virtual Network? Changing this forces a new resource to be created.
- DataFactoryId string
- The ID of the Data Factory this Integration Runtime is associated with. Changing this forces a new resource to be created.
- CleanupEnabled bool
- If set to false, the cluster will not be recycled; it will be reused for the next data flow activity run until its TTL (time to live) is reached. Defaults to true.
- ComputeType string
- Compute type of the cluster which will execute the data flow job. Valid values are General, ComputeOptimized and MemoryOptimized. Defaults to General.
- CoreCount int
- Core count of the cluster which will execute the data flow job. Valid values are 8, 16, 32, 48, 80, 144 and 272. Defaults to 8.
- Description string
- Integration runtime description.
- Location string
- Specifies the supported Azure location where the resource exists. Use AutoResolve to create an auto-resolve integration runtime. Changing this forces a new resource to be created.
- Name string
- Specifies the name of the Managed Integration Runtime. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- TimeToLiveMin int
- Time to live (in minutes) setting of the cluster which will execute the data flow job. Defaults to 0.
- VirtualNetworkEnabled bool
- Is the Integration Runtime compute provisioned within a Managed Virtual Network? Changing this forces a new resource to be created.
- dataFactoryId String
- The ID of the Data Factory this Integration Runtime is associated with. Changing this forces a new resource to be created.
- cleanupEnabled Boolean
- If set to false, the cluster will not be recycled; it will be reused for the next data flow activity run until its TTL (time to live) is reached. Defaults to true.
- computeType String
- Compute type of the cluster which will execute the data flow job. Valid values are General, ComputeOptimized and MemoryOptimized. Defaults to General.
- coreCount Integer
- Core count of the cluster which will execute the data flow job. Valid values are 8, 16, 32, 48, 80, 144 and 272. Defaults to 8.
- description String
- Integration runtime description.
- location String
- Specifies the supported Azure location where the resource exists. Use AutoResolve to create an auto-resolve integration runtime. Changing this forces a new resource to be created.
- name String
- Specifies the name of the Managed Integration Runtime. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- timeToLiveMin Integer
- Time to live (in minutes) setting of the cluster which will execute the data flow job. Defaults to 0.
- virtualNetworkEnabled Boolean
- Is the Integration Runtime compute provisioned within a Managed Virtual Network? Changing this forces a new resource to be created.
- dataFactoryId string
- The ID of the Data Factory this Integration Runtime is associated with. Changing this forces a new resource to be created.
- cleanupEnabled boolean
- If set to false, the cluster will not be recycled; it will be reused for the next data flow activity run until its TTL (time to live) is reached. Defaults to true.
- computeType string
- Compute type of the cluster which will execute the data flow job. Valid values are General, ComputeOptimized and MemoryOptimized. Defaults to General.
- coreCount number
- Core count of the cluster which will execute the data flow job. Valid values are 8, 16, 32, 48, 80, 144 and 272. Defaults to 8.
- description string
- Integration runtime description.
- location string
- Specifies the supported Azure location where the resource exists. Use AutoResolve to create an auto-resolve integration runtime. Changing this forces a new resource to be created.
- name string
- Specifies the name of the Managed Integration Runtime. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- timeToLiveMin number
- Time to live (in minutes) setting of the cluster which will execute the data flow job. Defaults to 0.
- virtualNetworkEnabled boolean
- Is the Integration Runtime compute provisioned within a Managed Virtual Network? Changing this forces a new resource to be created.
- data_factory_id str
- The ID of the Data Factory this Integration Runtime is associated with. Changing this forces a new resource to be created.
- cleanup_enabled bool
- If set to false, the cluster will not be recycled; it will be reused for the next data flow activity run until its TTL (time to live) is reached. Defaults to true.
- compute_type str
- Compute type of the cluster which will execute the data flow job. Valid values are General, ComputeOptimized and MemoryOptimized. Defaults to General.
- core_count int
- Core count of the cluster which will execute the data flow job. Valid values are 8, 16, 32, 48, 80, 144 and 272. Defaults to 8.
- description str
- Integration runtime description.
- location str
- Specifies the supported Azure location where the resource exists. Use AutoResolve to create an auto-resolve integration runtime. Changing this forces a new resource to be created.
- name str
- Specifies the name of the Managed Integration Runtime. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- time_to_live_min int
- Time to live (in minutes) setting of the cluster which will execute the data flow job. Defaults to 0.
- virtual_network_enabled bool
- Is the Integration Runtime compute provisioned within a Managed Virtual Network? Changing this forces a new resource to be created.
- dataFactoryId String
- The ID of the Data Factory this Integration Runtime is associated with. Changing this forces a new resource to be created.
- cleanupEnabled Boolean
- If set to false, the cluster will not be recycled; it will be reused for the next data flow activity run until its TTL (time to live) is reached. Defaults to true.
- computeType String
- Compute type of the cluster which will execute the data flow job. Valid values are General, ComputeOptimized and MemoryOptimized. Defaults to General.
- coreCount Number
- Core count of the cluster which will execute the data flow job. Valid values are 8, 16, 32, 48, 80, 144 and 272. Defaults to 8.
- description String
- Integration runtime description.
- location String
- Specifies the supported Azure location where the resource exists. Use AutoResolve to create an auto-resolve integration runtime. Changing this forces a new resource to be created.
- name String
- Specifies the name of the Managed Integration Runtime. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- timeToLiveMin Number
- Time to live (in minutes) setting of the cluster which will execute the data flow job. Defaults to 0.
- virtualNetworkEnabled Boolean
- Is the Integration Runtime compute provisioned within a Managed Virtual Network? Changing this forces a new resource to be created.
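Taken together, these inputs cover the common case of an auto-resolve runtime with tuned compute. The following TypeScript sketch is illustrative only; the resource names, the 15-minute TTL, and the managedVirtualNetworkEnabled flag on the factory are assumptions chosen for this example rather than requirements.
import * as azure from "@pulumi/azure";

// Illustrative resource group and factory; names are placeholders.
const rg = new azure.core.ResourceGroup("rg", {
    name: "example-resources",
    location: "West Europe",
});
const factory = new azure.datafactory.Factory("factory", {
    name: "example-adf",
    location: rg.location,
    resourceGroupName: rg.name,
    // Assumption: the factory's managed virtual network is enabled so that
    // virtualNetworkEnabled can be set on the runtime below.
    managedVirtualNetworkEnabled: true,
});
const runtime = new azure.datafactory.IntegrationRuntimeRule("runtime", {
    name: "example-ir",
    dataFactoryId: factory.id,
    location: "AutoResolve",         // auto-resolve integration runtime
    computeType: "MemoryOptimized",  // General, ComputeOptimized or MemoryOptimized
    coreCount: 16,                   // 8, 16, 32, 48, 80, 144 or 272
    timeToLiveMin: 15,               // keep the data flow cluster warm for 15 minutes
    cleanupEnabled: true,
    virtualNetworkEnabled: true,
});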
Outputs
All input properties are implicitly available as output properties. Additionally, the IntegrationRuntimeRule resource produces the following output properties:
- Id string
- The provider-assigned unique ID for this managed resource.
- Id string
- The provider-assigned unique ID for this managed resource.
- id String
- The provider-assigned unique ID for this managed resource.
- id string
- The provider-assigned unique ID for this managed resource.
- id str
- The provider-assigned unique ID for this managed resource.
- id String
- The provider-assigned unique ID for this managed resource.
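Continuing the TypeScript program from Example Usage, the provider-assigned ID can be surfaced as a stack output (the export name is illustrative):
export const integrationRuntimeId = exampleIntegrationRuntimeRule.id;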
Look up Existing IntegrationRuntimeRule Resource
Get an existing IntegrationRuntimeRule resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: IntegrationRuntimeRuleState, opts?: CustomResourceOptions): IntegrationRuntimeRule
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        cleanup_enabled: Optional[bool] = None,
        compute_type: Optional[str] = None,
        core_count: Optional[int] = None,
        data_factory_id: Optional[str] = None,
        description: Optional[str] = None,
        location: Optional[str] = None,
        name: Optional[str] = None,
        time_to_live_min: Optional[int] = None,
        virtual_network_enabled: Optional[bool] = None) -> IntegrationRuntimeRule
func GetIntegrationRuntimeRule(ctx *Context, name string, id IDInput, state *IntegrationRuntimeRuleState, opts ...ResourceOption) (*IntegrationRuntimeRule, error)
public static IntegrationRuntimeRule Get(string name, Input<string> id, IntegrationRuntimeRuleState? state, CustomResourceOptions? opts = null)
public static IntegrationRuntimeRule get(String name, Output<String> id, IntegrationRuntimeRuleState state, CustomResourceOptions options)
resources:
  _:
    type: azure:datafactory:IntegrationRuntimeRule
    get:
      id: ${id}
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
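As a sketch in TypeScript, an existing runtime can be adopted into a program with the static get method; the resource ID below is a placeholder, not a real integration runtime ID:
import * as azure from "@pulumi/azure";

// Look up an existing Managed Integration Runtime by its Azure resource ID.
const existing = azure.datafactory.IntegrationRuntimeRule.get(
    "existing",
    "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/integrationruntimes/example",
);

// The looked-up resource exposes the same outputs as one created by this program.
export const existingComputeType = existing.computeType;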
- CleanupEnabled bool
- If set to false, the cluster will not be recycled; it will be reused for the next data flow activity run until its TTL (time to live) is reached. Defaults to true.
- ComputeType string
- Compute type of the cluster which will execute the data flow job. Valid values are General, ComputeOptimized and MemoryOptimized. Defaults to General.
- CoreCount int
- Core count of the cluster which will execute the data flow job. Valid values are 8, 16, 32, 48, 80, 144 and 272. Defaults to 8.
- DataFactoryId string
- The ID of the Data Factory this Integration Runtime is associated with. Changing this forces a new resource to be created.
- Description string
- Integration runtime description.
- Location string
- Specifies the supported Azure location where the resource exists. Use AutoResolve to create an auto-resolve integration runtime. Changing this forces a new resource to be created.
- Name string
- Specifies the name of the Managed Integration Runtime. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- TimeToLiveMin int
- Time to live (in minutes) setting of the cluster which will execute the data flow job. Defaults to 0.
- VirtualNetworkEnabled bool
- Is the Integration Runtime compute provisioned within a Managed Virtual Network? Changing this forces a new resource to be created.
- CleanupEnabled bool
- If set to false, the cluster will not be recycled; it will be reused for the next data flow activity run until its TTL (time to live) is reached. Defaults to true.
- ComputeType string
- Compute type of the cluster which will execute the data flow job. Valid values are General, ComputeOptimized and MemoryOptimized. Defaults to General.
- CoreCount int
- Core count of the cluster which will execute the data flow job. Valid values are 8, 16, 32, 48, 80, 144 and 272. Defaults to 8.
- DataFactoryId string
- The ID of the Data Factory this Integration Runtime is associated with. Changing this forces a new resource to be created.
- Description string
- Integration runtime description.
- Location string
- Specifies the supported Azure location where the resource exists. Use AutoResolve to create an auto-resolve integration runtime. Changing this forces a new resource to be created.
- Name string
- Specifies the name of the Managed Integration Runtime. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- TimeToLiveMin int
- Time to live (in minutes) setting of the cluster which will execute the data flow job. Defaults to 0.
- VirtualNetworkEnabled bool
- Is the Integration Runtime compute provisioned within a Managed Virtual Network? Changing this forces a new resource to be created.
- cleanupEnabled Boolean
- If set to false, the cluster will not be recycled; it will be reused for the next data flow activity run until its TTL (time to live) is reached. Defaults to true.
- computeType String
- Compute type of the cluster which will execute the data flow job. Valid values are General, ComputeOptimized and MemoryOptimized. Defaults to General.
- coreCount Integer
- Core count of the cluster which will execute the data flow job. Valid values are 8, 16, 32, 48, 80, 144 and 272. Defaults to 8.
- dataFactoryId String
- The ID of the Data Factory this Integration Runtime is associated with. Changing this forces a new resource to be created.
- description String
- Integration runtime description.
- location String
- Specifies the supported Azure location where the resource exists. Use AutoResolve to create an auto-resolve integration runtime. Changing this forces a new resource to be created.
- name String
- Specifies the name of the Managed Integration Runtime. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- timeToLiveMin Integer
- Time to live (in minutes) setting of the cluster which will execute the data flow job. Defaults to 0.
- virtualNetworkEnabled Boolean
- Is the Integration Runtime compute provisioned within a Managed Virtual Network? Changing this forces a new resource to be created.
- cleanupEnabled boolean
- If set to false, the cluster will not be recycled; it will be reused for the next data flow activity run until its TTL (time to live) is reached. Defaults to true.
- computeType string
- Compute type of the cluster which will execute the data flow job. Valid values are General, ComputeOptimized and MemoryOptimized. Defaults to General.
- coreCount number
- Core count of the cluster which will execute the data flow job. Valid values are 8, 16, 32, 48, 80, 144 and 272. Defaults to 8.
- dataFactoryId string
- The ID of the Data Factory this Integration Runtime is associated with. Changing this forces a new resource to be created.
- description string
- Integration runtime description.
- location string
- Specifies the supported Azure location where the resource exists. Use AutoResolve to create an auto-resolve integration runtime. Changing this forces a new resource to be created.
- name string
- Specifies the name of the Managed Integration Runtime. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- timeToLiveMin number
- Time to live (in minutes) setting of the cluster which will execute the data flow job. Defaults to 0.
- virtualNetworkEnabled boolean
- Is the Integration Runtime compute provisioned within a Managed Virtual Network? Changing this forces a new resource to be created.
- cleanup_enabled bool
- If set to false, the cluster will not be recycled; it will be reused for the next data flow activity run until its TTL (time to live) is reached. Defaults to true.
- compute_type str
- Compute type of the cluster which will execute the data flow job. Valid values are General, ComputeOptimized and MemoryOptimized. Defaults to General.
- core_count int
- Core count of the cluster which will execute the data flow job. Valid values are 8, 16, 32, 48, 80, 144 and 272. Defaults to 8.
- data_factory_id str
- The ID of the Data Factory this Integration Runtime is associated with. Changing this forces a new resource to be created.
- description str
- Integration runtime description.
- location str
- Specifies the supported Azure location where the resource exists. Use AutoResolve to create an auto-resolve integration runtime. Changing this forces a new resource to be created.
- name str
- Specifies the name of the Managed Integration Runtime. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- time_to_live_min int
- Time to live (in minutes) setting of the cluster which will execute the data flow job. Defaults to 0.
- virtual_network_enabled bool
- Is the Integration Runtime compute provisioned within a Managed Virtual Network? Changing this forces a new resource to be created.
- cleanupEnabled Boolean
- If set to false, the cluster will not be recycled; it will be reused for the next data flow activity run until its TTL (time to live) is reached. Defaults to true.
- computeType String
- Compute type of the cluster which will execute the data flow job. Valid values are General, ComputeOptimized and MemoryOptimized. Defaults to General.
- coreCount Number
- Core count of the cluster which will execute the data flow job. Valid values are 8, 16, 32, 48, 80, 144 and 272. Defaults to 8.
- dataFactoryId String
- The ID of the Data Factory this Integration Runtime is associated with. Changing this forces a new resource to be created.
- description String
- Integration runtime description.
- location String
- Specifies the supported Azure location where the resource exists. Use AutoResolve to create an auto-resolve integration runtime. Changing this forces a new resource to be created.
- name String
- Specifies the name of the Managed Integration Runtime. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- timeToLiveMin Number
- Time to live (in minutes) setting of the cluster which will execute the data flow job. Defaults to 0.
- virtualNetworkEnabled Boolean
- Is the Integration Runtime compute provisioned within a Managed Virtual Network? Changing this forces a new resource to be created.
Import
Data Factory Azure Integration Runtimes can be imported using the resource ID, e.g.
$ pulumi import azure:datafactory/integrationRuntimeRule:IntegrationRuntimeRule example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/integrationruntimes/example
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- Azure Classic pulumi/pulumi-azure
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the azurerm Terraform Provider.