We recommend using Azure Native.
azure.streamanalytics.OutputTable
Explore with Pulumi AI
Manages a Stream Analytics Output Table.
Example Usage
import * as pulumi from "@pulumi/pulumi";
import * as azure from "@pulumi/azure";
const exampleResourceGroup = new azure.core.ResourceGroup("example", {
    name: "rg-example",
    location: "West Europe",
});
const example = azure.streamanalytics.getJobOutput({
    name: "example-job",
    resourceGroupName: exampleResourceGroup.name,
});
const exampleAccount = new azure.storage.Account("example", {
    name: "examplesa",
    resourceGroupName: exampleResourceGroup.name,
    location: exampleResourceGroup.location,
    accountTier: "Standard",
    accountReplicationType: "LRS",
});
const exampleTable = new azure.storage.Table("example", {
    name: "exampletable",
    storageAccountName: exampleAccount.name,
});
const exampleOutputTable = new azure.streamanalytics.OutputTable("example", {
    name: "output-to-storage-table",
    streamAnalyticsJobName: example.apply(example => example.name),
    resourceGroupName: example.apply(example => example.resourceGroupName),
    storageAccountName: exampleAccount.name,
    storageAccountKey: exampleAccount.primaryAccessKey,
    table: exampleTable.name,
    partitionKey: "foo",
    rowKey: "bar",
    batchSize: 100,
});
import pulumi
import pulumi_azure as azure
example_resource_group = azure.core.ResourceGroup("example",
    name="rg-example",
    location="West Europe")
example = azure.streamanalytics.get_job_output(name="example-job",
    resource_group_name=example_resource_group.name)
example_account = azure.storage.Account("example",
    name="examplesa",
    resource_group_name=example_resource_group.name,
    location=example_resource_group.location,
    account_tier="Standard",
    account_replication_type="LRS")
example_table = azure.storage.Table("example",
    name="exampletable",
    storage_account_name=example_account.name)
example_output_table = azure.streamanalytics.OutputTable("example",
    name="output-to-storage-table",
    stream_analytics_job_name=example.name,
    resource_group_name=example.resource_group_name,
    storage_account_name=example_account.name,
    storage_account_key=example_account.primary_access_key,
    table=example_table.name,
    partition_key="foo",
    row_key="bar",
    batch_size=100)
package main
import (
	"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/core"
	"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/storage"
	"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/streamanalytics"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		exampleResourceGroup, err := core.NewResourceGroup(ctx, "example", &core.ResourceGroupArgs{
			Name:     pulumi.String("rg-example"),
			Location: pulumi.String("West Europe"),
		})
		if err != nil {
			return err
		}
		example := streamanalytics.LookupJobOutput(ctx, streamanalytics.GetJobOutputArgs{
			Name:              pulumi.String("example-job"),
			ResourceGroupName: exampleResourceGroup.Name,
		}, nil)
		exampleAccount, err := storage.NewAccount(ctx, "example", &storage.AccountArgs{
			Name:                   pulumi.String("examplesa"),
			ResourceGroupName:      exampleResourceGroup.Name,
			Location:               exampleResourceGroup.Location,
			AccountTier:            pulumi.String("Standard"),
			AccountReplicationType: pulumi.String("LRS"),
		})
		if err != nil {
			return err
		}
		exampleTable, err := storage.NewTable(ctx, "example", &storage.TableArgs{
			Name:               pulumi.String("exampletable"),
			StorageAccountName: exampleAccount.Name,
		})
		if err != nil {
			return err
		}
		_, err = streamanalytics.NewOutputTable(ctx, "example", &streamanalytics.OutputTableArgs{
			Name: pulumi.String("output-to-storage-table"),
			StreamAnalyticsJobName: pulumi.String(example.ApplyT(func(example streamanalytics.GetJobResult) (*string, error) {
				return &example.Name, nil
			}).(pulumi.StringPtrOutput)),
			ResourceGroupName: pulumi.String(example.ApplyT(func(example streamanalytics.GetJobResult) (*string, error) {
				return &example.ResourceGroupName, nil
			}).(pulumi.StringPtrOutput)),
			StorageAccountName: exampleAccount.Name,
			StorageAccountKey:  exampleAccount.PrimaryAccessKey,
			Table:              exampleTable.Name,
			PartitionKey:       pulumi.String("foo"),
			RowKey:             pulumi.String("bar"),
			BatchSize:          pulumi.Int(100),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Azure = Pulumi.Azure;
return await Deployment.RunAsync(() => 
{
    var exampleResourceGroup = new Azure.Core.ResourceGroup("example", new()
    {
        Name = "rg-example",
        Location = "West Europe",
    });
    var example = Azure.StreamAnalytics.GetJob.Invoke(new()
    {
        Name = "example-job",
        ResourceGroupName = exampleResourceGroup.Name,
    });
    var exampleAccount = new Azure.Storage.Account("example", new()
    {
        Name = "examplesa",
        ResourceGroupName = exampleResourceGroup.Name,
        Location = exampleResourceGroup.Location,
        AccountTier = "Standard",
        AccountReplicationType = "LRS",
    });
    var exampleTable = new Azure.Storage.Table("example", new()
    {
        Name = "exampletable",
        StorageAccountName = exampleAccount.Name,
    });
    var exampleOutputTable = new Azure.StreamAnalytics.OutputTable("example", new()
    {
        Name = "output-to-storage-table",
        StreamAnalyticsJobName = example.Apply(getJobResult => getJobResult.Name),
        ResourceGroupName = example.Apply(getJobResult => getJobResult.ResourceGroupName),
        StorageAccountName = exampleAccount.Name,
        StorageAccountKey = exampleAccount.PrimaryAccessKey,
        Table = exampleTable.Name,
        PartitionKey = "foo",
        RowKey = "bar",
        BatchSize = 100,
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.azure.core.ResourceGroup;
import com.pulumi.azure.core.ResourceGroupArgs;
import com.pulumi.azure.streamanalytics.StreamanalyticsFunctions;
import com.pulumi.azure.streamanalytics.inputs.GetJobArgs;
import com.pulumi.azure.storage.Account;
import com.pulumi.azure.storage.AccountArgs;
import com.pulumi.azure.storage.Table;
import com.pulumi.azure.storage.TableArgs;
import com.pulumi.azure.streamanalytics.OutputTable;
import com.pulumi.azure.streamanalytics.OutputTableArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var exampleResourceGroup = new ResourceGroup("exampleResourceGroup", ResourceGroupArgs.builder()
            .name("rg-example")
            .location("West Europe")
            .build());
        final var example = StreamanalyticsFunctions.getJob(GetJobArgs.builder()
            .name("example-job")
            .resourceGroupName(exampleResourceGroup.name())
            .build());
        var exampleAccount = new Account("exampleAccount", AccountArgs.builder()
            .name("examplesa")
            .resourceGroupName(exampleResourceGroup.name())
            .location(exampleResourceGroup.location())
            .accountTier("Standard")
            .accountReplicationType("LRS")
            .build());
        var exampleTable = new Table("exampleTable", TableArgs.builder()
            .name("exampletable")
            .storageAccountName(exampleAccount.name())
            .build());
        var exampleOutputTable = new OutputTable("exampleOutputTable", OutputTableArgs.builder()
            .name("output-to-storage-table")
            .streamAnalyticsJobName(example.applyValue(getJobResult -> getJobResult).applyValue(example -> example.applyValue(getJobResult -> getJobResult.name())))
            .resourceGroupName(example.applyValue(getJobResult -> getJobResult).applyValue(example -> example.applyValue(getJobResult -> getJobResult.resourceGroupName())))
            .storageAccountName(exampleAccount.name())
            .storageAccountKey(exampleAccount.primaryAccessKey())
            .table(exampleTable.name())
            .partitionKey("foo")
            .rowKey("bar")
            .batchSize(100)
            .build());
    }
}
resources:
  exampleResourceGroup:
    type: azure:core:ResourceGroup
    name: example
    properties:
      name: rg-example
      location: West Europe
  exampleAccount:
    type: azure:storage:Account
    name: example
    properties:
      name: examplesa
      resourceGroupName: ${exampleResourceGroup.name}
      location: ${exampleResourceGroup.location}
      accountTier: Standard
      accountReplicationType: LRS
  exampleTable:
    type: azure:storage:Table
    name: example
    properties:
      name: exampletable
      storageAccountName: ${exampleAccount.name}
  exampleOutputTable:
    type: azure:streamanalytics:OutputTable
    name: example
    properties:
      name: output-to-storage-table
      streamAnalyticsJobName: ${example.name}
      resourceGroupName: ${example.resourceGroupName}
      storageAccountName: ${exampleAccount.name}
      storageAccountKey: ${exampleAccount.primaryAccessKey}
      table: ${exampleTable.name}
      partitionKey: foo
      rowKey: bar
      batchSize: 100
variables:
  example:
    fn::invoke:
      function: azure:streamanalytics:getJob
      arguments:
        name: example-job
        resourceGroupName: ${exampleResourceGroup.name}
Create OutputTable Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new OutputTable(name: string, args: OutputTableArgs, opts?: CustomResourceOptions);
@overload
def OutputTable(resource_name: str,
                args: OutputTableArgs,
                opts: Optional[ResourceOptions] = None)
@overload
def OutputTable(resource_name: str,
                opts: Optional[ResourceOptions] = None,
                batch_size: Optional[int] = None,
                partition_key: Optional[str] = None,
                resource_group_name: Optional[str] = None,
                row_key: Optional[str] = None,
                storage_account_key: Optional[str] = None,
                storage_account_name: Optional[str] = None,
                stream_analytics_job_name: Optional[str] = None,
                table: Optional[str] = None,
                columns_to_removes: Optional[Sequence[str]] = None,
                name: Optional[str] = None)
func NewOutputTable(ctx *Context, name string, args OutputTableArgs, opts ...ResourceOption) (*OutputTable, error)
public OutputTable(string name, OutputTableArgs args, CustomResourceOptions? opts = null)
public OutputTable(String name, OutputTableArgs args)
public OutputTable(String name, OutputTableArgs args, CustomResourceOptions options)
type: azure:streamanalytics:OutputTable
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args OutputTableArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args OutputTableArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args OutputTableArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args OutputTableArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args OutputTableArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var outputTableResource = new Azure.StreamAnalytics.OutputTable("outputTableResource", new()
{
    BatchSize = 0,
    PartitionKey = "string",
    ResourceGroupName = "string",
    RowKey = "string",
    StorageAccountKey = "string",
    StorageAccountName = "string",
    StreamAnalyticsJobName = "string",
    Table = "string",
    ColumnsToRemoves = new[]
    {
        "string",
    },
    Name = "string",
});
example, err := streamanalytics.NewOutputTable(ctx, "outputTableResource", &streamanalytics.OutputTableArgs{
	BatchSize:              pulumi.Int(0),
	PartitionKey:           pulumi.String("string"),
	ResourceGroupName:      pulumi.String("string"),
	RowKey:                 pulumi.String("string"),
	StorageAccountKey:      pulumi.String("string"),
	StorageAccountName:     pulumi.String("string"),
	StreamAnalyticsJobName: pulumi.String("string"),
	Table:                  pulumi.String("string"),
	ColumnsToRemoves: pulumi.StringArray{
		pulumi.String("string"),
	},
	Name: pulumi.String("string"),
})
var outputTableResource = new OutputTable("outputTableResource", OutputTableArgs.builder()
    .batchSize(0)
    .partitionKey("string")
    .resourceGroupName("string")
    .rowKey("string")
    .storageAccountKey("string")
    .storageAccountName("string")
    .streamAnalyticsJobName("string")
    .table("string")
    .columnsToRemoves("string")
    .name("string")
    .build());
output_table_resource = azure.streamanalytics.OutputTable("outputTableResource",
    batch_size=0,
    partition_key="string",
    resource_group_name="string",
    row_key="string",
    storage_account_key="string",
    storage_account_name="string",
    stream_analytics_job_name="string",
    table="string",
    columns_to_removes=["string"],
    name="string")
const outputTableResource = new azure.streamanalytics.OutputTable("outputTableResource", {
    batchSize: 0,
    partitionKey: "string",
    resourceGroupName: "string",
    rowKey: "string",
    storageAccountKey: "string",
    storageAccountName: "string",
    streamAnalyticsJobName: "string",
    table: "string",
    columnsToRemoves: ["string"],
    name: "string",
});
type: azure:streamanalytics:OutputTable
properties:
    batchSize: 0
    columnsToRemoves:
        - string
    name: string
    partitionKey: string
    resourceGroupName: string
    rowKey: string
    storageAccountKey: string
    storageAccountName: string
    streamAnalyticsJobName: string
    table: string
OutputTable Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The OutputTable resource accepts the following input properties:
- BatchSize int
- The number of records for a batch operation. Must be between 1 and 100.
- PartitionKey string
- The name of the output column that contains the partition key.
- ResourceGroupName string
- The name of the Resource Group where the Stream Analytics Job exists. Changing this forces a new resource to be created.
- RowKey string
- The name of the output column that contains the row key.
- StorageAccountKey string
- The Access Key which should be used to connect to this Storage Account.
- StorageAccountName string
- The name of the Storage Account.
- StreamAnalyticsJobName string
- The name of the Stream Analytics Job. Changing this forces a new resource to be created.
- Table string
- The name of the table where the stream should be output to.
- ColumnsToRemoves List<string>
- A list of the column names to be removed from output event entities.
- Name string
- The name of the Stream Output. Changing this forces a new resource to be created.
- BatchSize int
- The number of records for a batch operation. Must be between 1 and 100.
- PartitionKey string
- The name of the output column that contains the partition key.
- ResourceGroupName string
- The name of the Resource Group where the Stream Analytics Job exists. Changing this forces a new resource to be created.
- RowKey string
- The name of the output column that contains the row key.
- StorageAccountKey string
- The Access Key which should be used to connect to this Storage Account.
- StorageAccountName string
- The name of the Storage Account.
- StreamAnalyticsJobName string
- The name of the Stream Analytics Job. Changing this forces a new resource to be created.
- Table string
- The name of the table where the stream should be output to.
- ColumnsToRemoves []string
- A list of the column names to be removed from output event entities.
- Name string
- The name of the Stream Output. Changing this forces a new resource to be created.
- batchSize Integer
- The number of records for a batch operation. Must be between 1 and 100.
- partitionKey String
- The name of the output column that contains the partition key.
- resourceGroupName String
- The name of the Resource Group where the Stream Analytics Job exists. Changing this forces a new resource to be created.
- rowKey String
- The name of the output column that contains the row key.
- storageAccountKey String
- The Access Key which should be used to connect to this Storage Account.
- storageAccountName String
- The name of the Storage Account.
- streamAnalyticsJobName String
- The name of the Stream Analytics Job. Changing this forces a new resource to be created.
- table String
- The name of the table where the stream should be output to.
- columnsToRemoves List<String>
- A list of the column names to be removed from output event entities.
- name String
- The name of the Stream Output. Changing this forces a new resource to be created.
- batchSize number
- The number of records for a batch operation. Must be between 1 and 100.
- partitionKey string
- The name of the output column that contains the partition key.
- resourceGroupName string
- The name of the Resource Group where the Stream Analytics Job exists. Changing this forces a new resource to be created.
- rowKey string
- The name of the output column that contains the row key.
- storageAccountKey string
- The Access Key which should be used to connect to this Storage Account.
- storageAccountName string
- The name of the Storage Account.
- streamAnalyticsJobName string
- The name of the Stream Analytics Job. Changing this forces a new resource to be created.
- table string
- The name of the table where the stream should be output to.
- columnsToRemoves string[]
- A list of the column names to be removed from output event entities.
- name string
- The name of the Stream Output. Changing this forces a new resource to be created.
- batch_size int
- The number of records for a batch operation. Must be between 1 and 100.
- partition_key str
- The name of the output column that contains the partition key.
- resource_group_name str
- The name of the Resource Group where the Stream Analytics Job exists. Changing this forces a new resource to be created.
- row_key str
- The name of the output column that contains the row key.
- storage_account_key str
- The Access Key which should be used to connect to this Storage Account.
- storage_account_name str
- The name of the Storage Account.
- stream_analytics_job_name str
- The name of the Stream Analytics Job. Changing this forces a new resource to be created.
- table str
- The name of the table where the stream should be output to.
- columns_to_removes Sequence[str]
- A list of the column names to be removed from output event entities.
- name str
- The name of the Stream Output. Changing this forces a new resource to be created.
- batchSize Number
- The number of records for a batch operation. Must be between 1 and 100.
- partitionKey String
- The name of the output column that contains the partition key.
- resourceGroupName String
- The name of the Resource Group where the Stream Analytics Job exists. Changing this forces a new resource to be created.
- rowKey String
- The name of the output column that contains the row key.
- storageAccountKey String
- The Access Key which should be used to connect to this Storage Account.
- storageAccountName String
- The name of the Storage Account.
- streamAnalyticsJobName String
- The name of the Stream Analytics Job. Changing this forces a new resource to be created.
- table String
- The name of the table where the stream should be output to.
- columnsToRemoves List<String>
- A list of the column names to be removed from output event entities.
- name String
- The name of the Stream Output. Changing this forces a new resource to be created.
Outputs
All input properties are implicitly available as output properties. Additionally, the OutputTable resource produces the following output properties:
- Id string
- The provider-assigned unique ID for this managed resource.
- Id string
- The provider-assigned unique ID for this managed resource.
- id String
- The provider-assigned unique ID for this managed resource.
- id string
- The provider-assigned unique ID for this managed resource.
- id str
- The provider-assigned unique ID for this managed resource.
- id String
- The provider-assigned unique ID for this managed resource.
Look up Existing OutputTable Resource
Get an existing OutputTable resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: OutputTableState, opts?: CustomResourceOptions): OutputTable
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        batch_size: Optional[int] = None,
        columns_to_removes: Optional[Sequence[str]] = None,
        name: Optional[str] = None,
        partition_key: Optional[str] = None,
        resource_group_name: Optional[str] = None,
        row_key: Optional[str] = None,
        storage_account_key: Optional[str] = None,
        storage_account_name: Optional[str] = None,
        stream_analytics_job_name: Optional[str] = None,
        table: Optional[str] = None) -> OutputTable
func GetOutputTable(ctx *Context, name string, id IDInput, state *OutputTableState, opts ...ResourceOption) (*OutputTable, error)
public static OutputTable Get(string name, Input<string> id, OutputTableState? state, CustomResourceOptions? opts = null)
public static OutputTable get(String name, Output<String> id, OutputTableState state, CustomResourceOptions options)
resources:
  _:
    type: azure:streamanalytics:OutputTable
    get:
      id: ${id}
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- BatchSize int
- The number of records for a batch operation. Must be between 1 and 100.
- ColumnsToRemoves List<string>
- A list of the column names to be removed from output event entities.
- Name string
- The name of the Stream Output. Changing this forces a new resource to be created.
- PartitionKey string
- The name of the output column that contains the partition key.
- ResourceGroupName string
- The name of the Resource Group where the Stream Analytics Job exists. Changing this forces a new resource to be created.
- RowKey string
- The name of the output column that contains the row key.
- StorageAccountKey string
- The Access Key which should be used to connect to this Storage Account.
- StorageAccountName string
- The name of the Storage Account.
- StreamAnalyticsJobName string
- The name of the Stream Analytics Job. Changing this forces a new resource to be created.
- Table string
- The name of the table where the stream should be output to.
- BatchSize int
- The number of records for a batch operation. Must be between 1 and 100.
- ColumnsToRemoves []string
- A list of the column names to be removed from output event entities.
- Name string
- The name of the Stream Output. Changing this forces a new resource to be created.
- PartitionKey string
- The name of the output column that contains the partition key.
- ResourceGroupName string
- The name of the Resource Group where the Stream Analytics Job exists. Changing this forces a new resource to be created.
- RowKey string
- The name of the output column that contains the row key.
- StorageAccountKey string
- The Access Key which should be used to connect to this Storage Account.
- StorageAccountName string
- The name of the Storage Account.
- StreamAnalyticsJobName string
- The name of the Stream Analytics Job. Changing this forces a new resource to be created.
- Table string
- The name of the table where the stream should be output to.
- batchSize Integer
- The number of records for a batch operation. Must be between 1 and 100.
- columnsToRemoves List<String>
- A list of the column names to be removed from output event entities.
- name String
- The name of the Stream Output. Changing this forces a new resource to be created.
- partitionKey String
- The name of the output column that contains the partition key.
- resourceGroupName String
- The name of the Resource Group where the Stream Analytics Job exists. Changing this forces a new resource to be created.
- rowKey String
- The name of the output column that contains the row key.
- storageAccountKey String
- The Access Key which should be used to connect to this Storage Account.
- storageAccountName String
- The name of the Storage Account.
- streamAnalyticsJobName String
- The name of the Stream Analytics Job. Changing this forces a new resource to be created.
- table String
- The name of the table where the stream should be output to.
- batchSize number
- The number of records for a batch operation. Must be between 1 and 100.
- columnsToRemoves string[]
- A list of the column names to be removed from output event entities.
- name string
- The name of the Stream Output. Changing this forces a new resource to be created.
- partitionKey string
- The name of the output column that contains the partition key.
- resourceGroupName string
- The name of the Resource Group where the Stream Analytics Job exists. Changing this forces a new resource to be created.
- rowKey string
- The name of the output column that contains the row key.
- storageAccountKey string
- The Access Key which should be used to connect to this Storage Account.
- storageAccountName string
- The name of the Storage Account.
- streamAnalyticsJobName string
- The name of the Stream Analytics Job. Changing this forces a new resource to be created.
- table string
- The name of the table where the stream should be output to.
- batch_size int
- The number of records for a batch operation. Must be between 1 and 100.
- columns_to_removes Sequence[str]
- A list of the column names to be removed from output event entities.
- name str
- The name of the Stream Output. Changing this forces a new resource to be created.
- partition_key str
- The name of the output column that contains the partition key.
- resource_group_name str
- The name of the Resource Group where the Stream Analytics Job exists. Changing this forces a new resource to be created.
- row_key str
- The name of the output column that contains the row key.
- storage_account_key str
- The Access Key which should be used to connect to this Storage Account.
- storage_account_name str
- The name of the Storage Account.
- stream_analytics_job_name str
- The name of the Stream Analytics Job. Changing this forces a new resource to be created.
- table str
- The name of the table where the stream should be output to.
- batchSize Number
- The number of records for a batch operation. Must be between 1 and 100.
- columnsToRemoves List<String>
- A list of the column names to be removed from output event entities.
- name String
- The name of the Stream Output. Changing this forces a new resource to be created.
- partitionKey String
- The name of the output column that contains the partition key.
- resourceGroupName String
- The name of the Resource Group where the Stream Analytics Job exists. Changing this forces a new resource to be created.
- rowKey String
- The name of the output column that contains the row key.
- storageAccountKey String
- The Access Key which should be used to connect to this Storage Account.
- storageAccountName String
- The name of the Storage Account.
- streamAnalyticsJobName String
- The name of the Stream Analytics Job. Changing this forces a new resource to be created.
- table String
- The name of the table where the stream should be output to.
Import
Stream Analytics Output to Table can be imported using the resource id, e.g.
$ pulumi import azure:streamanalytics/outputTable:OutputTable example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.StreamAnalytics/streamingJobs/job1/outputs/output1
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- Azure Classic pulumi/pulumi-azure
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the azurerm Terraform Provider.