We recommend using the Azure Native provider for new workloads where possible.
azure.datafactory.DatasetDelimitedText
Manages an Azure Delimited Text Dataset inside an Azure Data Factory.
Example Usage
TypeScript

import * as pulumi from "@pulumi/pulumi";
import * as azure from "@pulumi/azure";
const example = new azure.core.ResourceGroup("example", {
    name: "example-resources",
    location: "West Europe",
});
const exampleFactory = new azure.datafactory.Factory("example", {
    name: "example",
    location: example.location,
    resourceGroupName: example.name,
});
const exampleLinkedServiceWeb = new azure.datafactory.LinkedServiceWeb("example", {
    name: "example",
    dataFactoryId: exampleFactory.id,
    authenticationType: "Anonymous",
    url: "https://www.bing.com",
});
const exampleDatasetDelimitedText = new azure.datafactory.DatasetDelimitedText("example", {
    name: "example",
    dataFactoryId: exampleFactory.id,
    linkedServiceName: exampleLinkedServiceWeb.name,
    httpServerLocation: {
        relativeUrl: "http://www.bing.com",
        path: "foo/bar/",
        filename: "fizz.txt",
    },
    columnDelimiter: ",",
    rowDelimiter: "NEW",
    encoding: "UTF-8",
    quoteCharacter: "x",
    escapeCharacter: "f",
    firstRowAsHeader: true,
    nullValue: "NULL",
});
Python

import pulumi
import pulumi_azure as azure
example = azure.core.ResourceGroup("example",
    name="example-resources",
    location="West Europe")
example_factory = azure.datafactory.Factory("example",
    name="example",
    location=example.location,
    resource_group_name=example.name)
example_linked_service_web = azure.datafactory.LinkedServiceWeb("example",
    name="example",
    data_factory_id=example_factory.id,
    authentication_type="Anonymous",
    url="https://www.bing.com")
example_dataset_delimited_text = azure.datafactory.DatasetDelimitedText("example",
    name="example",
    data_factory_id=example_factory.id,
    linked_service_name=example_linked_service_web.name,
    http_server_location={
        "relative_url": "http://www.bing.com",
        "path": "foo/bar/",
        "filename": "fizz.txt",
    },
    column_delimiter=",",
    row_delimiter="NEW",
    encoding="UTF-8",
    quote_character="x",
    escape_character="f",
    first_row_as_header=True,
    null_value="NULL")
Go

package main
import (
	"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/core"
	"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/datafactory"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		example, err := core.NewResourceGroup(ctx, "example", &core.ResourceGroupArgs{
			Name:     pulumi.String("example-resources"),
			Location: pulumi.String("West Europe"),
		})
		if err != nil {
			return err
		}
		exampleFactory, err := datafactory.NewFactory(ctx, "example", &datafactory.FactoryArgs{
			Name:              pulumi.String("example"),
			Location:          example.Location,
			ResourceGroupName: example.Name,
		})
		if err != nil {
			return err
		}
		exampleLinkedServiceWeb, err := datafactory.NewLinkedServiceWeb(ctx, "example", &datafactory.LinkedServiceWebArgs{
			Name:               pulumi.String("example"),
			DataFactoryId:      exampleFactory.ID(),
			AuthenticationType: pulumi.String("Anonymous"),
			Url:                pulumi.String("https://www.bing.com"),
		})
		if err != nil {
			return err
		}
		_, err = datafactory.NewDatasetDelimitedText(ctx, "example", &datafactory.DatasetDelimitedTextArgs{
			Name:              pulumi.String("example"),
			DataFactoryId:     exampleFactory.ID(),
			LinkedServiceName: exampleLinkedServiceWeb.Name,
			HttpServerLocation: &datafactory.DatasetDelimitedTextHttpServerLocationArgs{
				RelativeUrl: pulumi.String("http://www.bing.com"),
				Path:        pulumi.String("foo/bar/"),
				Filename:    pulumi.String("fizz.txt"),
			},
			ColumnDelimiter:  pulumi.String(","),
			RowDelimiter:     pulumi.String("NEW"),
			Encoding:         pulumi.String("UTF-8"),
			QuoteCharacter:   pulumi.String("x"),
			EscapeCharacter:  pulumi.String("f"),
			FirstRowAsHeader: pulumi.Bool(true),
			NullValue:        pulumi.String("NULL"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
C#

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Azure = Pulumi.Azure;
return await Deployment.RunAsync(() => 
{
    var example = new Azure.Core.ResourceGroup("example", new()
    {
        Name = "example-resources",
        Location = "West Europe",
    });
    var exampleFactory = new Azure.DataFactory.Factory("example", new()
    {
        Name = "example",
        Location = example.Location,
        ResourceGroupName = example.Name,
    });
    var exampleLinkedServiceWeb = new Azure.DataFactory.LinkedServiceWeb("example", new()
    {
        Name = "example",
        DataFactoryId = exampleFactory.Id,
        AuthenticationType = "Anonymous",
        Url = "https://www.bing.com",
    });
    var exampleDatasetDelimitedText = new Azure.DataFactory.DatasetDelimitedText("example", new()
    {
        Name = "example",
        DataFactoryId = exampleFactory.Id,
        LinkedServiceName = exampleLinkedServiceWeb.Name,
        HttpServerLocation = new Azure.DataFactory.Inputs.DatasetDelimitedTextHttpServerLocationArgs
        {
            RelativeUrl = "http://www.bing.com",
            Path = "foo/bar/",
            Filename = "fizz.txt",
        },
        ColumnDelimiter = ",",
        RowDelimiter = "NEW",
        Encoding = "UTF-8",
        QuoteCharacter = "x",
        EscapeCharacter = "f",
        FirstRowAsHeader = true,
        NullValue = "NULL",
    });
});
Java

package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.azure.core.ResourceGroup;
import com.pulumi.azure.core.ResourceGroupArgs;
import com.pulumi.azure.datafactory.Factory;
import com.pulumi.azure.datafactory.FactoryArgs;
import com.pulumi.azure.datafactory.LinkedServiceWeb;
import com.pulumi.azure.datafactory.LinkedServiceWebArgs;
import com.pulumi.azure.datafactory.DatasetDelimitedText;
import com.pulumi.azure.datafactory.DatasetDelimitedTextArgs;
import com.pulumi.azure.datafactory.inputs.DatasetDelimitedTextHttpServerLocationArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var example = new ResourceGroup("example", ResourceGroupArgs.builder()
            .name("example-resources")
            .location("West Europe")
            .build());
        var exampleFactory = new Factory("exampleFactory", FactoryArgs.builder()
            .name("example")
            .location(example.location())
            .resourceGroupName(example.name())
            .build());
        var exampleLinkedServiceWeb = new LinkedServiceWeb("exampleLinkedServiceWeb", LinkedServiceWebArgs.builder()
            .name("example")
            .dataFactoryId(exampleFactory.id())
            .authenticationType("Anonymous")
            .url("https://www.bing.com")
            .build());
        var exampleDatasetDelimitedText = new DatasetDelimitedText("exampleDatasetDelimitedText", DatasetDelimitedTextArgs.builder()
            .name("example")
            .dataFactoryId(exampleFactory.id())
            .linkedServiceName(exampleLinkedServiceWeb.name())
            .httpServerLocation(DatasetDelimitedTextHttpServerLocationArgs.builder()
                .relativeUrl("http://www.bing.com")
                .path("foo/bar/")
                .filename("fizz.txt")
                .build())
            .columnDelimiter(",")
            .rowDelimiter("NEW")
            .encoding("UTF-8")
            .quoteCharacter("x")
            .escapeCharacter("f")
            .firstRowAsHeader(true)
            .nullValue("NULL")
            .build());
    }
}
YAML

resources:
  example:
    type: azure:core:ResourceGroup
    properties:
      name: example-resources
      location: West Europe
  exampleFactory:
    type: azure:datafactory:Factory
    name: example
    properties:
      name: example
      location: ${example.location}
      resourceGroupName: ${example.name}
  exampleLinkedServiceWeb:
    type: azure:datafactory:LinkedServiceWeb
    name: example
    properties:
      name: example
      dataFactoryId: ${exampleFactory.id}
      authenticationType: Anonymous
      url: https://www.bing.com
  exampleDatasetDelimitedText:
    type: azure:datafactory:DatasetDelimitedText
    name: example
    properties:
      name: example
      dataFactoryId: ${exampleFactory.id}
      linkedServiceName: ${exampleLinkedServiceWeb.name}
      httpServerLocation:
        relativeUrl: http://www.bing.com
        path: foo/bar/
        filename: fizz.txt
      columnDelimiter: ','
      rowDelimiter: NEW
      encoding: UTF-8
      quoteCharacter: x
      escapeCharacter: f
      firstRowAsHeader: true
      nullValue: "NULL"
Create DatasetDelimitedText Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new DatasetDelimitedText(name: string, args: DatasetDelimitedTextArgs, opts?: CustomResourceOptions);

@overload
def DatasetDelimitedText(resource_name: str,
                         args: DatasetDelimitedTextArgs,
                         opts: Optional[ResourceOptions] = None)
@overload
def DatasetDelimitedText(resource_name: str,
                         opts: Optional[ResourceOptions] = None,
                         data_factory_id: Optional[str] = None,
                         linked_service_name: Optional[str] = None,
                         escape_character: Optional[str] = None,
                         first_row_as_header: Optional[bool] = None,
                         column_delimiter: Optional[str] = None,
                         compression_codec: Optional[str] = None,
                         compression_level: Optional[str] = None,
                         azure_blob_fs_location: Optional[DatasetDelimitedTextAzureBlobFsLocationArgs] = None,
                         description: Optional[str] = None,
                         encoding: Optional[str] = None,
                         additional_properties: Optional[Mapping[str, str]] = None,
                         azure_blob_storage_location: Optional[DatasetDelimitedTextAzureBlobStorageLocationArgs] = None,
                         folder: Optional[str] = None,
                         http_server_location: Optional[DatasetDelimitedTextHttpServerLocationArgs] = None,
                         annotations: Optional[Sequence[str]] = None,
                         name: Optional[str] = None,
                         null_value: Optional[str] = None,
                         parameters: Optional[Mapping[str, str]] = None,
                         quote_character: Optional[str] = None,
                         row_delimiter: Optional[str] = None,
                         schema_columns: Optional[Sequence[DatasetDelimitedTextSchemaColumnArgs]] = None)

func NewDatasetDelimitedText(ctx *Context, name string, args DatasetDelimitedTextArgs, opts ...ResourceOption) (*DatasetDelimitedText, error)

public DatasetDelimitedText(string name, DatasetDelimitedTextArgs args, CustomResourceOptions? opts = null)
public DatasetDelimitedText(String name, DatasetDelimitedTextArgs args)
public DatasetDelimitedText(String name, DatasetDelimitedTextArgs args, CustomResourceOptions options)
type: azure:datafactory:DatasetDelimitedText
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args DatasetDelimitedTextArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args DatasetDelimitedTextArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args DatasetDelimitedTextArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args DatasetDelimitedTextArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args DatasetDelimitedTextArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
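As a brief Python sketch of the opts parameter in practice (it reuses example_factory and example_linked_service_web from the Example Usage above; the protect and depends_on settings are illustrative, not required):

import pulumi
import pulumi_azure as azure

# Resource options are passed alongside the args; here the dataset is
# protected from accidental deletion and explicitly depends on the factory.
dataset = azure.datafactory.DatasetDelimitedText("example",
    data_factory_id=example_factory.id,
    linked_service_name=example_linked_service_web.name,
    http_server_location={
        "relative_url": "http://www.bing.com",
        "path": "foo/bar/",
        "filename": "fizz.txt",
    },
    opts=pulumi.ResourceOptions(
        protect=True,
        depends_on=[example_factory],
    ))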
Constructor example
The following reference example uses placeholder values for all input properties.
C#

var datasetDelimitedTextResource = new Azure.DataFactory.DatasetDelimitedText("datasetDelimitedTextResource", new()
{
    DataFactoryId = "string",
    LinkedServiceName = "string",
    EscapeCharacter = "string",
    FirstRowAsHeader = false,
    ColumnDelimiter = "string",
    CompressionCodec = "string",
    CompressionLevel = "string",
    AzureBlobFsLocation = new Azure.DataFactory.Inputs.DatasetDelimitedTextAzureBlobFsLocationArgs
    {
        DynamicFileSystemEnabled = false,
        DynamicFilenameEnabled = false,
        DynamicPathEnabled = false,
        FileSystem = "string",
        Filename = "string",
        Path = "string",
    },
    Description = "string",
    Encoding = "string",
    AdditionalProperties = 
    {
        { "string", "string" },
    },
    AzureBlobStorageLocation = new Azure.DataFactory.Inputs.DatasetDelimitedTextAzureBlobStorageLocationArgs
    {
        Container = "string",
        DynamicContainerEnabled = false,
        DynamicFilenameEnabled = false,
        DynamicPathEnabled = false,
        Filename = "string",
        Path = "string",
    },
    Folder = "string",
    HttpServerLocation = new Azure.DataFactory.Inputs.DatasetDelimitedTextHttpServerLocationArgs
    {
        Filename = "string",
        Path = "string",
        RelativeUrl = "string",
        DynamicFilenameEnabled = false,
        DynamicPathEnabled = false,
    },
    Annotations = new[]
    {
        "string",
    },
    Name = "string",
    NullValue = "string",
    Parameters = 
    {
        { "string", "string" },
    },
    QuoteCharacter = "string",
    RowDelimiter = "string",
    SchemaColumns = new[]
    {
        new Azure.DataFactory.Inputs.DatasetDelimitedTextSchemaColumnArgs
        {
            Name = "string",
            Description = "string",
            Type = "string",
        },
    },
});
Go

example, err := datafactory.NewDatasetDelimitedText(ctx, "datasetDelimitedTextResource", &datafactory.DatasetDelimitedTextArgs{
	DataFactoryId:     pulumi.String("string"),
	LinkedServiceName: pulumi.String("string"),
	EscapeCharacter:   pulumi.String("string"),
	FirstRowAsHeader:  pulumi.Bool(false),
	ColumnDelimiter:   pulumi.String("string"),
	CompressionCodec:  pulumi.String("string"),
	CompressionLevel:  pulumi.String("string"),
	AzureBlobFsLocation: &datafactory.DatasetDelimitedTextAzureBlobFsLocationArgs{
		DynamicFileSystemEnabled: pulumi.Bool(false),
		DynamicFilenameEnabled:   pulumi.Bool(false),
		DynamicPathEnabled:       pulumi.Bool(false),
		FileSystem:               pulumi.String("string"),
		Filename:                 pulumi.String("string"),
		Path:                     pulumi.String("string"),
	},
	Description: pulumi.String("string"),
	Encoding:    pulumi.String("string"),
	AdditionalProperties: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	AzureBlobStorageLocation: &datafactory.DatasetDelimitedTextAzureBlobStorageLocationArgs{
		Container:               pulumi.String("string"),
		DynamicContainerEnabled: pulumi.Bool(false),
		DynamicFilenameEnabled:  pulumi.Bool(false),
		DynamicPathEnabled:      pulumi.Bool(false),
		Filename:                pulumi.String("string"),
		Path:                    pulumi.String("string"),
	},
	Folder: pulumi.String("string"),
	HttpServerLocation: &datafactory.DatasetDelimitedTextHttpServerLocationArgs{
		Filename:               pulumi.String("string"),
		Path:                   pulumi.String("string"),
		RelativeUrl:            pulumi.String("string"),
		DynamicFilenameEnabled: pulumi.Bool(false),
		DynamicPathEnabled:     pulumi.Bool(false),
	},
	Annotations: pulumi.StringArray{
		pulumi.String("string"),
	},
	Name:      pulumi.String("string"),
	NullValue: pulumi.String("string"),
	Parameters: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	QuoteCharacter: pulumi.String("string"),
	RowDelimiter:   pulumi.String("string"),
	SchemaColumns: datafactory.DatasetDelimitedTextSchemaColumnArray{
		&datafactory.DatasetDelimitedTextSchemaColumnArgs{
			Name:        pulumi.String("string"),
			Description: pulumi.String("string"),
			Type:        pulumi.String("string"),
		},
	},
})
Java

var datasetDelimitedTextResource = new DatasetDelimitedText("datasetDelimitedTextResource", DatasetDelimitedTextArgs.builder()
    .dataFactoryId("string")
    .linkedServiceName("string")
    .escapeCharacter("string")
    .firstRowAsHeader(false)
    .columnDelimiter("string")
    .compressionCodec("string")
    .compressionLevel("string")
    .azureBlobFsLocation(DatasetDelimitedTextAzureBlobFsLocationArgs.builder()
        .dynamicFileSystemEnabled(false)
        .dynamicFilenameEnabled(false)
        .dynamicPathEnabled(false)
        .fileSystem("string")
        .filename("string")
        .path("string")
        .build())
    .description("string")
    .encoding("string")
    .additionalProperties(Map.of("string", "string"))
    .azureBlobStorageLocation(DatasetDelimitedTextAzureBlobStorageLocationArgs.builder()
        .container("string")
        .dynamicContainerEnabled(false)
        .dynamicFilenameEnabled(false)
        .dynamicPathEnabled(false)
        .filename("string")
        .path("string")
        .build())
    .folder("string")
    .httpServerLocation(DatasetDelimitedTextHttpServerLocationArgs.builder()
        .filename("string")
        .path("string")
        .relativeUrl("string")
        .dynamicFilenameEnabled(false)
        .dynamicPathEnabled(false)
        .build())
    .annotations("string")
    .name("string")
    .nullValue("string")
    .parameters(Map.of("string", "string"))
    .quoteCharacter("string")
    .rowDelimiter("string")
    .schemaColumns(DatasetDelimitedTextSchemaColumnArgs.builder()
        .name("string")
        .description("string")
        .type("string")
        .build())
    .build());
Python

dataset_delimited_text_resource = azure.datafactory.DatasetDelimitedText("datasetDelimitedTextResource",
    data_factory_id="string",
    linked_service_name="string",
    escape_character="string",
    first_row_as_header=False,
    column_delimiter="string",
    compression_codec="string",
    compression_level="string",
    azure_blob_fs_location={
        "dynamic_file_system_enabled": False,
        "dynamic_filename_enabled": False,
        "dynamic_path_enabled": False,
        "file_system": "string",
        "filename": "string",
        "path": "string",
    },
    description="string",
    encoding="string",
    additional_properties={
        "string": "string",
    },
    azure_blob_storage_location={
        "container": "string",
        "dynamic_container_enabled": False,
        "dynamic_filename_enabled": False,
        "dynamic_path_enabled": False,
        "filename": "string",
        "path": "string",
    },
    folder="string",
    http_server_location={
        "filename": "string",
        "path": "string",
        "relative_url": "string",
        "dynamic_filename_enabled": False,
        "dynamic_path_enabled": False,
    },
    annotations=["string"],
    name="string",
    null_value="string",
    parameters={
        "string": "string",
    },
    quote_character="string",
    row_delimiter="string",
    schema_columns=[{
        "name": "string",
        "description": "string",
        "type": "string",
    }])
TypeScript

const datasetDelimitedTextResource = new azure.datafactory.DatasetDelimitedText("datasetDelimitedTextResource", {
    dataFactoryId: "string",
    linkedServiceName: "string",
    escapeCharacter: "string",
    firstRowAsHeader: false,
    columnDelimiter: "string",
    compressionCodec: "string",
    compressionLevel: "string",
    azureBlobFsLocation: {
        dynamicFileSystemEnabled: false,
        dynamicFilenameEnabled: false,
        dynamicPathEnabled: false,
        fileSystem: "string",
        filename: "string",
        path: "string",
    },
    description: "string",
    encoding: "string",
    additionalProperties: {
        string: "string",
    },
    azureBlobStorageLocation: {
        container: "string",
        dynamicContainerEnabled: false,
        dynamicFilenameEnabled: false,
        dynamicPathEnabled: false,
        filename: "string",
        path: "string",
    },
    folder: "string",
    httpServerLocation: {
        filename: "string",
        path: "string",
        relativeUrl: "string",
        dynamicFilenameEnabled: false,
        dynamicPathEnabled: false,
    },
    annotations: ["string"],
    name: "string",
    nullValue: "string",
    parameters: {
        string: "string",
    },
    quoteCharacter: "string",
    rowDelimiter: "string",
    schemaColumns: [{
        name: "string",
        description: "string",
        type: "string",
    }],
});
YAML

type: azure:datafactory:DatasetDelimitedText
properties:
    additionalProperties:
        string: string
    annotations:
        - string
    azureBlobFsLocation:
        dynamicFileSystemEnabled: false
        dynamicFilenameEnabled: false
        dynamicPathEnabled: false
        fileSystem: string
        filename: string
        path: string
    azureBlobStorageLocation:
        container: string
        dynamicContainerEnabled: false
        dynamicFilenameEnabled: false
        dynamicPathEnabled: false
        filename: string
        path: string
    columnDelimiter: string
    compressionCodec: string
    compressionLevel: string
    dataFactoryId: string
    description: string
    encoding: string
    escapeCharacter: string
    firstRowAsHeader: false
    folder: string
    httpServerLocation:
        dynamicFilenameEnabled: false
        dynamicPathEnabled: false
        filename: string
        path: string
        relativeUrl: string
    linkedServiceName: string
    name: string
    nullValue: string
    parameters:
        string: string
    quoteCharacter: string
    rowDelimiter: string
    schemaColumns:
        - description: string
          name: string
          type: string
DatasetDelimitedText Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
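For example, the http_server_location input can be supplied either way. A minimal sketch, reusing example_factory and example_linked_service_web from the Example Usage above:

import pulumi_azure as azure

# Dictionary literal form
dataset_a = azure.datafactory.DatasetDelimitedText("dataset-a",
    data_factory_id=example_factory.id,
    linked_service_name=example_linked_service_web.name,
    http_server_location={
        "relative_url": "http://www.bing.com",
        "path": "foo/bar/",
        "filename": "fizz.txt",
    })

# Equivalent argument-class form
dataset_b = azure.datafactory.DatasetDelimitedText("dataset-b",
    data_factory_id=example_factory.id,
    linked_service_name=example_linked_service_web.name,
    http_server_location=azure.datafactory.DatasetDelimitedTextHttpServerLocationArgs(
        relative_url="http://www.bing.com",
        path="foo/bar/",
        filename="fizz.txt",
    ))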
The DatasetDelimitedText resource accepts the following input properties:
- DataFactoryId string
- The ID of the Data Factory in which to associate the Dataset. Changing this forces a new resource to be created.
- LinkedServiceName string
- The name of the Data Factory Linked Service with which to associate the Dataset.
- AdditionalProperties Dictionary<string, string>
- A map of additional properties to associate with the Data Factory Dataset. The following locations are supported for a Delimited Text Dataset (exactly one of them must be set):
- Annotations List<string>
- List of tags that can be used for describing the Data Factory Dataset.
- AzureBlobFsLocation DatasetDelimitedTextAzureBlobFsLocation
- An azure_blob_fs_location block as defined below.
- AzureBlobStorageLocation DatasetDelimitedTextAzureBlobStorageLocation
- An azure_blob_storage_location block as defined below.
- ColumnDelimiter string
- The column delimiter. Defaults to ,.
- CompressionCodec string
- The compression codec used to read/write text files. Valid values are None, bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy and lz4. Please note these values are case sensitive.
- CompressionLevel string
- The compression ratio for the Data Factory Dataset. Valid values are Fastest or Optimal. Please note these values are case sensitive.
- Description string
- The description for the Data Factory Dataset.
- Encoding string
- The encoding format for the file.
- EscapeCharacter string
- The escape character. Defaults to \.
- FirstRowAsHeader bool
- When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. Defaults to false.
- Folder string
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- HttpServerLocation DatasetDelimitedTextHttpServerLocation
- An http_server_location block as defined below. The following arguments are specific to the Delimited Text Dataset:
- Name string
- Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- NullValue string
- The null value string. Defaults to "".
- Parameters Dictionary<string, string>
- A map of parameters to associate with the Data Factory Dataset.
- QuoteCharacter string
- The quote character. Defaults to ".
- RowDelimiter string
- The row delimiter. Defaults to any of the following values on read: \r\n, \r and \n; and to \n or \r\n on write, by mapping data flow and Copy activity respectively.
- SchemaColumns List<DatasetDelimitedTextSchemaColumn>
- A schema_column block as defined below.
- DataFactoryId string
- The ID of the Data Factory in which to associate the Dataset. Changing this forces a new resource to be created.
- LinkedServiceName string
- The name of the Data Factory Linked Service with which to associate the Dataset.
- AdditionalProperties map[string]string
- A map of additional properties to associate with the Data Factory Dataset. The following locations are supported for a Delimited Text Dataset (exactly one of them must be set):
- Annotations []string
- List of tags that can be used for describing the Data Factory Dataset.
- AzureBlobFsLocation DatasetDelimitedTextAzureBlobFsLocationArgs
- An azure_blob_fs_location block as defined below.
- AzureBlobStorageLocation DatasetDelimitedTextAzureBlobStorageLocationArgs
- An azure_blob_storage_location block as defined below.
- ColumnDelimiter string
- The column delimiter. Defaults to ,.
- CompressionCodec string
- The compression codec used to read/write text files. Valid values are None, bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy and lz4. Please note these values are case sensitive.
- CompressionLevel string
- The compression ratio for the Data Factory Dataset. Valid values are Fastest or Optimal. Please note these values are case sensitive.
- Description string
- The description for the Data Factory Dataset.
- Encoding string
- The encoding format for the file.
- EscapeCharacter string
- The escape character. Defaults to \.
- FirstRowAsHeader bool
- When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. Defaults to false.
- Folder string
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- HttpServerLocation DatasetDelimitedTextHttpServerLocationArgs
- An http_server_location block as defined below. The following arguments are specific to the Delimited Text Dataset:
- Name string
- Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- NullValue string
- The null value string. Defaults to "".
- Parameters map[string]string
- A map of parameters to associate with the Data Factory Dataset.
- QuoteCharacter string
- The quote character. Defaults to ".
- RowDelimiter string
- The row delimiter. Defaults to any of the following values on read: \r\n, \r and \n; and to \n or \r\n on write, by mapping data flow and Copy activity respectively.
- SchemaColumns []DatasetDelimitedTextSchemaColumnArgs
- A schema_column block as defined below.
- dataFactoryId String
- The ID of the Data Factory in which to associate the Dataset. Changing this forces a new resource to be created.
- linkedServiceName String
- The name of the Data Factory Linked Service with which to associate the Dataset.
- additionalProperties Map<String,String>
- A map of additional properties to associate with the Data Factory Dataset. The following locations are supported for a Delimited Text Dataset (exactly one of them must be set):
- annotations List<String>
- List of tags that can be used for describing the Data Factory Dataset.
- azureBlobFsLocation DatasetDelimitedTextAzureBlobFsLocation
- An azure_blob_fs_location block as defined below.
- azureBlobStorageLocation DatasetDelimitedTextAzureBlobStorageLocation
- An azure_blob_storage_location block as defined below.
- columnDelimiter String
- The column delimiter. Defaults to ,.
- compressionCodec String
- The compression codec used to read/write text files. Valid values are None, bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy and lz4. Please note these values are case sensitive.
- compressionLevel String
- The compression ratio for the Data Factory Dataset. Valid values are Fastest or Optimal. Please note these values are case sensitive.
- description String
- The description for the Data Factory Dataset.
- encoding String
- The encoding format for the file.
- escapeCharacter String
- The escape character. Defaults to \.
- firstRowAsHeader Boolean
- When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. Defaults to false.
- folder String
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- httpServerLocation DatasetDelimitedTextHttpServerLocation
- An http_server_location block as defined below. The following arguments are specific to the Delimited Text Dataset:
- name String
- Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- nullValue String
- The null value string. Defaults to "".
- parameters Map<String,String>
- A map of parameters to associate with the Data Factory Dataset.
- quoteCharacter String
- The quote character. Defaults to ".
- rowDelimiter String
- The row delimiter. Defaults to any of the following values on read: \r\n, \r and \n; and to \n or \r\n on write, by mapping data flow and Copy activity respectively.
- schemaColumns List<DatasetDelimitedTextSchemaColumn>
- A schema_column block as defined below.
- dataFactoryId string
- The ID of the Data Factory in which to associate the Dataset. Changing this forces a new resource to be created.
- linkedServiceName string
- The name of the Data Factory Linked Service with which to associate the Dataset.
- additionalProperties {[key: string]: string}
- A map of additional properties to associate with the Data Factory Dataset. The following locations are supported for a Delimited Text Dataset (exactly one of them must be set):
- annotations string[]
- List of tags that can be used for describing the Data Factory Dataset.
- azureBlobFsLocation DatasetDelimitedTextAzureBlobFsLocation
- An azure_blob_fs_location block as defined below.
- azureBlobStorageLocation DatasetDelimitedTextAzureBlobStorageLocation
- An azure_blob_storage_location block as defined below.
- columnDelimiter string
- The column delimiter. Defaults to ,.
- compressionCodec string
- The compression codec used to read/write text files. Valid values are None, bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy and lz4. Please note these values are case sensitive.
- compressionLevel string
- The compression ratio for the Data Factory Dataset. Valid values are Fastest or Optimal. Please note these values are case sensitive.
- description string
- The description for the Data Factory Dataset.
- encoding string
- The encoding format for the file.
- escapeCharacter string
- The escape character. Defaults to \.
- firstRowAsHeader boolean
- When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. Defaults to false.
- folder string
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- httpServerLocation DatasetDelimitedTextHttpServerLocation
- An http_server_location block as defined below. The following arguments are specific to the Delimited Text Dataset:
- name string
- Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- nullValue string
- The null value string. Defaults to "".
- parameters {[key: string]: string}
- A map of parameters to associate with the Data Factory Dataset.
- quoteCharacter string
- The quote character. Defaults to ".
- rowDelimiter string
- The row delimiter. Defaults to any of the following values on read: \r\n, \r and \n; and to \n or \r\n on write, by mapping data flow and Copy activity respectively.
- schemaColumns DatasetDelimitedTextSchemaColumn[]
- A schema_column block as defined below.
- data_factory_id str
- The ID of the Data Factory in which to associate the Dataset. Changing this forces a new resource to be created.
- linked_service_name str
- The name of the Data Factory Linked Service with which to associate the Dataset.
- additional_properties Mapping[str, str]
- A map of additional properties to associate with the Data Factory Dataset. The following locations are supported for a Delimited Text Dataset (exactly one of them must be set):
- annotations Sequence[str]
- List of tags that can be used for describing the Data Factory Dataset.
- azure_blob_fs_location DatasetDelimitedTextAzureBlobFsLocationArgs
- An azure_blob_fs_location block as defined below.
- azure_blob_storage_location DatasetDelimitedTextAzureBlobStorageLocationArgs
- An azure_blob_storage_location block as defined below.
- column_delimiter str
- The column delimiter. Defaults to ,.
- compression_codec str
- The compression codec used to read/write text files. Valid values are None, bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy and lz4. Please note these values are case sensitive.
- compression_level str
- The compression ratio for the Data Factory Dataset. Valid values are Fastest or Optimal. Please note these values are case sensitive.
- description str
- The description for the Data Factory Dataset.
- encoding str
- The encoding format for the file.
- escape_character str
- The escape character. Defaults to \.
- first_row_as_header bool
- When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. Defaults to false.
- folder str
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- http_server_location DatasetDelimitedTextHttpServerLocationArgs
- An http_server_location block as defined below. The following arguments are specific to the Delimited Text Dataset:
- name str
- Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- null_value str
- The null value string. Defaults to "".
- parameters Mapping[str, str]
- A map of parameters to associate with the Data Factory Dataset.
- quote_character str
- The quote character. Defaults to ".
- row_delimiter str
- The row delimiter. Defaults to any of the following values on read: \r\n, \r and \n; and to \n or \r\n on write, by mapping data flow and Copy activity respectively.
- schema_columns Sequence[DatasetDelimitedTextSchemaColumnArgs]
- A schema_column block as defined below.
- dataFactoryId String
- The ID of the Data Factory in which to associate the Dataset. Changing this forces a new resource to be created.
- linkedServiceName String
- The name of the Data Factory Linked Service with which to associate the Dataset.
- additionalProperties Map<String>
- A map of additional properties to associate with the Data Factory Dataset. The following locations are supported for a Delimited Text Dataset (exactly one of them must be set):
- annotations List<String>
- List of tags that can be used for describing the Data Factory Dataset.
- azureBlobFsLocation Property Map
- An azure_blob_fs_location block as defined below.
- azureBlobStorageLocation Property Map
- An azure_blob_storage_location block as defined below.
- columnDelimiter String
- The column delimiter. Defaults to ,.
- compressionCodec String
- The compression codec used to read/write text files. Valid values are None, bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy and lz4. Please note these values are case sensitive.
- compressionLevel String
- The compression ratio for the Data Factory Dataset. Valid values are Fastest or Optimal. Please note these values are case sensitive.
- description String
- The description for the Data Factory Dataset.
- encoding String
- The encoding format for the file.
- escapeCharacter String
- The escape character. Defaults to \.
- firstRowAsHeader Boolean
- When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. Defaults to false.
- folder String
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- httpServerLocation Property Map
- An http_server_location block as defined below. The following arguments are specific to the Delimited Text Dataset:
- name String
- Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- nullValue String
- The null value string. Defaults to "".
- parameters Map<String>
- A map of parameters to associate with the Data Factory Dataset.
- quoteCharacter String
- The quote character. Defaults to ".
- rowDelimiter String
- The row delimiter. Defaults to any of the following values on read: \r\n, \r and \n; and to \n or \r\n on write, by mapping data flow and Copy activity respectively.
- schemaColumns List<Property Map>
- A schema_column block as defined below.
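To illustrate the other location blocks above, here is a hedged Python sketch that stores the delimited text in Azure Blob Storage instead of an HTTP server and declares an explicit schema. It reuses example_factory from the Example Usage; the Blob Storage linked service (example_linked_service_blob), the container name, and the column types are illustrative assumptions, not values from this page.

import pulumi_azure as azure

# Sketch only: assumes an existing Blob Storage linked service named
# "example_linked_service_blob" and a container called "data".
blob_dataset = azure.datafactory.DatasetDelimitedText("blob-dataset",
    data_factory_id=example_factory.id,
    linked_service_name=example_linked_service_blob.name,  # hypothetical linked service
    azure_blob_storage_location={
        "container": "data",       # assumed container name
        "path": "input/",
        "filename": "orders.csv",
    },
    column_delimiter=",",
    first_row_as_header=True,
    schema_columns=[
        {"name": "order_id", "type": "Int32"},   # column types shown are illustrative
        {"name": "amount", "type": "Decimal"},
    ])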
Outputs
All input properties are implicitly available as output properties. Additionally, the DatasetDelimitedText resource produces the following output properties:
- Id string
- The provider-assigned unique ID for this managed resource.
- Id string
- The provider-assigned unique ID for this managed resource.
- id String
- The provider-assigned unique ID for this managed resource.
- id string
- The provider-assigned unique ID for this managed resource.
- id str
- The provider-assigned unique ID for this managed resource.
- id String
- The provider-assigned unique ID for this managed resource.
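A minimal Python sketch of reading the provider-assigned id back out (the export name is arbitrary; the resource is the one from the Example Usage above):

import pulumi

# Export the provider-assigned ID of the dataset created above.
pulumi.export("dataset_id", example_dataset_delimited_text.id)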
Look up Existing DatasetDelimitedText Resource
Get an existing DatasetDelimitedText resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: DatasetDelimitedTextState, opts?: CustomResourceOptions): DatasetDelimitedText

@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        additional_properties: Optional[Mapping[str, str]] = None,
        annotations: Optional[Sequence[str]] = None,
        azure_blob_fs_location: Optional[DatasetDelimitedTextAzureBlobFsLocationArgs] = None,
        azure_blob_storage_location: Optional[DatasetDelimitedTextAzureBlobStorageLocationArgs] = None,
        column_delimiter: Optional[str] = None,
        compression_codec: Optional[str] = None,
        compression_level: Optional[str] = None,
        data_factory_id: Optional[str] = None,
        description: Optional[str] = None,
        encoding: Optional[str] = None,
        escape_character: Optional[str] = None,
        first_row_as_header: Optional[bool] = None,
        folder: Optional[str] = None,
        http_server_location: Optional[DatasetDelimitedTextHttpServerLocationArgs] = None,
        linked_service_name: Optional[str] = None,
        name: Optional[str] = None,
        null_value: Optional[str] = None,
        parameters: Optional[Mapping[str, str]] = None,
        quote_character: Optional[str] = None,
        row_delimiter: Optional[str] = None,
        schema_columns: Optional[Sequence[DatasetDelimitedTextSchemaColumnArgs]] = None) -> DatasetDelimitedText

func GetDatasetDelimitedText(ctx *Context, name string, id IDInput, state *DatasetDelimitedTextState, opts ...ResourceOption) (*DatasetDelimitedText, error)

public static DatasetDelimitedText Get(string name, Input<string> id, DatasetDelimitedTextState? state, CustomResourceOptions? opts = null)

public static DatasetDelimitedText get(String name, Output<String> id, DatasetDelimitedTextState state, CustomResourceOptions options)

resources:
  _:
    type: azure:datafactory:DatasetDelimitedText
    get:
      id: ${id}

- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- AdditionalProperties Dictionary<string, string>
- A map of additional properties to associate with the Data Factory Dataset. The following locations are supported for a Delimited Text Dataset (exactly one of them must be set):
- Annotations List<string>
- List of tags that can be used for describing the Data Factory Dataset.
- AzureBlobFsLocation DatasetDelimitedTextAzureBlobFsLocation
- An azure_blob_fs_location block as defined below.
- AzureBlobStorageLocation DatasetDelimitedTextAzureBlobStorageLocation
- An azure_blob_storage_location block as defined below.
- ColumnDelimiter string
- The column delimiter. Defaults to ,.
- CompressionCodec string
- The compression codec used to read/write text files. Valid values are None, bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy and lz4. Please note these values are case sensitive.
- CompressionLevel string
- The compression ratio for the Data Factory Dataset. Valid values are Fastest or Optimal. Please note these values are case sensitive.
- DataFactoryId string
- The ID of the Data Factory in which to associate the Dataset. Changing this forces a new resource to be created.
- Description string
- The description for the Data Factory Dataset.
- Encoding string
- The encoding format for the file.
- EscapeCharacter string
- The escape character. Defaults to \.
- FirstRowAsHeader bool
- When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. Defaults to false.
- Folder string
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- HttpServerLocation DatasetDelimitedTextHttpServerLocation
- An http_server_location block as defined below. The following arguments are specific to the Delimited Text Dataset:
- LinkedServiceName string
- The name of the Data Factory Linked Service with which to associate the Dataset.
- Name string
- Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- NullValue string
- The null value string. Defaults to "".
- Parameters Dictionary<string, string>
- A map of parameters to associate with the Data Factory Dataset.
- QuoteCharacter string
- The quote character. Defaults to ".
- RowDelimiter string
- The row delimiter. Defaults to any of the following values on read: \r\n, \r and \n; and to \n or \r\n on write, by mapping data flow and Copy activity respectively.
- SchemaColumns List<DatasetDelimitedTextSchemaColumn>
- A schema_column block as defined below.
- AdditionalProperties map[string]string
- A map of additional properties to associate with the Data Factory Dataset. The following locations are supported for a Delimited Text Dataset (exactly one of them must be set):
- Annotations []string
- List of tags that can be used for describing the Data Factory Dataset.
- AzureBlobFsLocation DatasetDelimitedTextAzureBlobFsLocationArgs
- An azure_blob_fs_location block as defined below.
- AzureBlobStorageLocation DatasetDelimitedTextAzureBlobStorageLocationArgs
- An azure_blob_storage_location block as defined below.
- ColumnDelimiter string
- The column delimiter. Defaults to ,.
- CompressionCodec string
- The compression codec used to read/write text files. Valid values are None, bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy and lz4. Please note these values are case sensitive.
- CompressionLevel string
- The compression ratio for the Data Factory Dataset. Valid values are Fastest or Optimal. Please note these values are case sensitive.
- DataFactoryId string
- The ID of the Data Factory in which to associate the Dataset. Changing this forces a new resource to be created.
- Description string
- The description for the Data Factory Dataset.
- Encoding string
- The encoding format for the file.
- EscapeCharacter string
- The escape character. Defaults to \.
- FirstRowAsHeader bool
- When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. Defaults to false.
- Folder string
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- HttpServerLocation DatasetDelimitedTextHttpServerLocationArgs
- An http_server_location block as defined below. The following arguments are specific to the Delimited Text Dataset:
- LinkedServiceName string
- The name of the Data Factory Linked Service with which to associate the Dataset.
- Name string
- Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- NullValue string
- The null value string. Defaults to "".
- Parameters map[string]string
- A map of parameters to associate with the Data Factory Dataset.
- QuoteCharacter string
- The quote character. Defaults to ".
- RowDelimiter string
- The row delimiter. Defaults to any of the following values on read: \r\n, \r and \n; and to \n or \r\n on write, by mapping data flow and Copy activity respectively.
- SchemaColumns []DatasetDelimitedTextSchemaColumnArgs
- A schema_column block as defined below.
- additionalProperties Map<String,String>
- A map of additional properties to associate with the Data Factory Dataset. The following locations are supported for a Delimited Text Dataset (exactly one of them must be set):
- annotations List<String>
- List of tags that can be used for describing the Data Factory Dataset.
- azureBlobFsLocation DatasetDelimitedTextAzureBlobFsLocation
- An azure_blob_fs_location block as defined below.
- azureBlobStorageLocation DatasetDelimitedTextAzureBlobStorageLocation
- An azure_blob_storage_location block as defined below.
- columnDelimiter String
- The column delimiter. Defaults to ,.
- compressionCodec String
- The compression codec used to read/write text files. Valid values are None, bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy and lz4. Please note these values are case sensitive.
- compressionLevel String
- The compression ratio for the Data Factory Dataset. Valid values are Fastest or Optimal. Please note these values are case sensitive.
- dataFactoryId String
- The ID of the Data Factory in which to associate the Dataset. Changing this forces a new resource to be created.
- description String
- The description for the Data Factory Dataset.
- encoding String
- The encoding format for the file.
- escapeCharacter String
- The escape character. Defaults to \.
- firstRowAsHeader Boolean
- When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. Defaults to false.
- folder String
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- httpServerLocation DatasetDelimitedTextHttpServerLocation
- An http_server_location block as defined below. The following arguments are specific to the Delimited Text Dataset:
- linkedServiceName String
- The name of the Data Factory Linked Service with which to associate the Dataset.
- name String
- Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- nullValue String
- The null value string. Defaults to "".
- parameters Map<String,String>
- A map of parameters to associate with the Data Factory Dataset.
- quoteCharacter String
- The quote character. Defaults to ".
- rowDelimiter String
- The row delimiter. Defaults to any of the following values on read: \r\n, \r and \n; and to \n or \r\n on write, by mapping data flow and Copy activity respectively.
- schemaColumns List<DatasetDelimitedTextSchemaColumn>
- A schema_column block as defined below.
- additionalProperties {[key: string]: string}
- A map of additional properties to associate with the Data Factory Dataset. The following locations are supported for a Delimited Text Dataset (exactly one of them must be set):
- annotations string[]
- List of tags that can be used for describing the Data Factory Dataset.
- azureBlobFsLocation DatasetDelimitedTextAzureBlobFsLocation
- An azure_blob_fs_location block as defined below.
- azureBlobStorageLocation DatasetDelimitedTextAzureBlobStorageLocation
- An azure_blob_storage_location block as defined below.
- columnDelimiter string
- The column delimiter. Defaults to ,.
- compressionCodec string
- The compression codec used to read/write text files. Valid values are None, bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy and lz4. Please note these values are case sensitive.
- compressionLevel string
- The compression ratio for the Data Factory Dataset. Valid values are Fastest or Optimal. Please note these values are case sensitive.
- dataFactoryId string
- The ID of the Data Factory in which to associate the Dataset. Changing this forces a new resource to be created.
- description string
- The description for the Data Factory Dataset.
- encoding string
- The encoding format for the file.
- escapeCharacter string
- The escape character. Defaults to \.
- firstRowAsHeader boolean
- When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. Defaults to false.
- folder string
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- httpServerLocation DatasetDelimitedTextHttpServerLocation
- An http_server_location block as defined below. The following arguments are specific to the Delimited Text Dataset:
- linkedServiceName string
- The name of the Data Factory Linked Service with which to associate the Dataset.
- name string
- Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- nullValue string
- The null value string. Defaults to "".
- parameters {[key: string]: string}
- A map of parameters to associate with the Data Factory Dataset.
- quoteCharacter string
- The quote character. Defaults to ".
- rowDelimiter string
- The row delimiter. Defaults to any of the following values on read: \r\n, \r and \n; and to \n or \r\n on write, by mapping data flow and Copy activity respectively.
- schemaColumns DatasetDelimitedTextSchemaColumn[]
- A schema_column block as defined below.
- additional_properties Mapping[str, str]
- A map of additional properties to associate with the Data Factory Dataset. The following locations are supported for a Delimited Text Dataset (exactly one of them must be set):
- annotations Sequence[str]
- List of tags that can be used for describing the Data Factory Dataset.
- azure_blob_fs_location DatasetDelimitedTextAzureBlobFsLocationArgs
- An azure_blob_fs_location block as defined below.
- azure_blob_storage_location DatasetDelimitedTextAzureBlobStorageLocationArgs
- An azure_blob_storage_location block as defined below.
- column_delimiter str
- The column delimiter. Defaults to ,.
- compression_codec str
- The compression codec used to read/write text files. Valid values are None, bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy and lz4. Please note these values are case sensitive.
- compression_level str
- The compression ratio for the Data Factory Dataset. Valid values are Fastest or Optimal. Please note these values are case sensitive.
- data_factory_id str
- The ID of the Data Factory in which to associate the Dataset. Changing this forces a new resource to be created.
- description str
- The description for the Data Factory Dataset.
- encoding str
- The encoding format for the file.
- escape_character str
- The escape character. Defaults to \.
- first_row_as_header bool
- When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. Defaults to false.
- folder str
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- http_server_location DatasetDelimitedTextHttpServerLocationArgs
- An http_server_location block as defined below. The following arguments are specific to the Delimited Text Dataset:
- linked_service_name str
- The name of the Data Factory Linked Service with which to associate the Dataset.
- name str
- Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- null_value str
- The null value string. Defaults to "".
- parameters Mapping[str, str]
- A map of parameters to associate with the Data Factory Dataset.
- quote_character str
- The quote character. Defaults to ".
- row_delimiter str
- The row delimiter. Defaults to any of the following values on read: \r\n, \r and \n; and to \n or \r\n on write, by mapping data flow and Copy activity respectively.
- schema_columns Sequence[DatasetDelimitedTextSchemaColumnArgs]
- A schema_column block as defined below.
- additionalProperties Map<String>
- A map of additional properties to associate with the Data Factory Dataset. The following supported locations for a Delimited Text Dataset (exactly one of them must be set; a short sketch follows these listings):
- annotations List<String>
- List of tags that can be used for describing the Data Factory Dataset.
- azureBlobFsLocation Property Map
- An azure_blob_fs_location block as defined below.
- azureBlobStorageLocation Property Map
- An azure_blob_storage_location block as defined below.
- columnDelimiter String
- The column delimiter. Defaults to ,.
- compressionCodec String
- The compression codec used to read/write text files. Valid values are None, bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy and lz4. Please note these values are case sensitive.
- compressionLevel String
- The compression ratio for the Data Factory Dataset. Valid values are Fastest or Optimal. Please note these values are case sensitive.
- dataFactoryId String
- The ID of the Data Factory in which to create the Dataset. Changing this forces a new resource.
- description String
- The description for the Data Factory Dataset.
- encoding String
- The encoding format for the file.
- escapeCharacter String
- The escape character. Defaults to \.
- firstRowAsHeader Boolean
- When used as input, treat the first row of data as headers. When used as output, write the headers into the output as the first row of data. Defaults to false.
- folder String
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- httpServerLocation Property Map
- A http_server_location block as defined below. The following supported arguments are specific to Delimited Text Dataset:
- linkedServiceName String
- The name of the Data Factory Linked Service with which to associate the Dataset.
- name String
- Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- nullValue String
- The null value string. Defaults to "".
- parameters Map<String>
- A map of parameters to associate with the Data Factory Dataset.
- quoteCharacter String
- The quote character. Defaults to ".
- rowDelimiter String
- The row delimiter. Defaults to any of the following values on read: \r\n, \r or \n; and to \n or \r\n on write, for mapping data flows and the Copy activity respectively.
- schemaColumns List<Property Map>
- A schema_column block as defined below.
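To make the "exactly one location block" rule above concrete, here is a minimal, hedged Python sketch that points the dataset at an azure_blob_storage_location instead of an http_server_location. The Data Factory ID, linked service name, container, path and filename are placeholders for resources assumed to already exist; they are not part of this reference.
import pulumi_azure as azure

# Placeholder ID of an existing Data Factory (assumption, not from this page).
factory_id = "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example"

blob_dataset = azure.datafactory.DatasetDelimitedText("sketch",
    name="sketch",
    data_factory_id=factory_id,
    # Placeholder name of an existing Azure Blob Storage linked service.
    linked_service_name="example-blob-linked-service",
    # Exactly one location block may be set; here it is the blob storage one.
    azure_blob_storage_location={
        "container": "raw",
        "path": "invoices/2024",
        "filename": "invoices.csv",
    },
    column_delimiter=",",
    encoding="UTF-8",
    first_row_as_header=True,
    null_value="NULL")
Adding an azure_blob_fs_location or http_server_location block to the same dataset would violate the one-location rule.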
Supporting Types
DatasetDelimitedTextAzureBlobFsLocation, DatasetDelimitedTextAzureBlobFsLocationArgs              
- DynamicFileSystemEnabled bool
- Is the file_system using dynamic expression, function or system variables? Defaults to false.
- DynamicFilenameEnabled bool
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- DynamicPathEnabled bool
- Is the path using dynamic expression, function or system variables? Defaults to false.
- FileSystem string
- The Data Lake Storage Gen2 file system on the Azure Blob Storage Account hosting the file.
- Filename string
- The filename of the file.
- Path string
- The folder path to the file.
- DynamicFileSystemEnabled bool
- Is the file_system using dynamic expression, function or system variables? Defaults to false.
- DynamicFilenameEnabled bool
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- DynamicPathEnabled bool
- Is the path using dynamic expression, function or system variables? Defaults to false.
- FileSystem string
- The Data Lake Storage Gen2 file system on the Azure Blob Storage Account hosting the file.
- Filename string
- The filename of the file.
- Path string
- The folder path to the file.
- dynamicFileSystemEnabled Boolean
- Is the file_system using dynamic expression, function or system variables? Defaults to false.
- dynamicFilenameEnabled Boolean
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- dynamicPathEnabled Boolean
- Is the path using dynamic expression, function or system variables? Defaults to false.
- fileSystem String
- The Data Lake Storage Gen2 file system on the Azure Blob Storage Account hosting the file.
- filename String
- The filename of the file.
- path String
- The folder path to the file.
- dynamicFileSystemEnabled boolean
- Is the file_system using dynamic expression, function or system variables? Defaults to false.
- dynamicFilenameEnabled boolean
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- dynamicPathEnabled boolean
- Is the path using dynamic expression, function or system variables? Defaults to false.
- fileSystem string
- The Data Lake Storage Gen2 file system on the Azure Blob Storage Account hosting the file.
- filename string
- The filename of the file.
- path string
- The folder path to the file.
- dynamic_file_system_enabled bool
- Is the file_system using dynamic expression, function or system variables? Defaults to false.
- dynamic_filename_enabled bool
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- dynamic_path_enabled bool
- Is the path using dynamic expression, function or system variables? Defaults to false.
- file_system str
- The Data Lake Storage Gen2 file system on the Azure Blob Storage Account hosting the file.
- filename str
- The filename of the file.
- path str
- The folder path to the file.
- dynamicFileSystemEnabled Boolean
- Is the file_system using dynamic expression, function or system variables? Defaults to false.
- dynamicFilenameEnabled Boolean
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- dynamicPathEnabled Boolean
- Is the path using dynamic expression, function or system variables? Defaults to false.
- fileSystem String
- The Data Lake Storage Gen2 file system on the Azure Blob Storage Account hosting the file.
- filename String
- The filename of the file.
- path String
- The folder path to the file.
DatasetDelimitedTextAzureBlobStorageLocation, DatasetDelimitedTextAzureBlobStorageLocationArgs              
- Container string
- The container on the Azure Blob Storage Account hosting the file.
- DynamicContainerEnabled bool
- Is the container using dynamic expression, function or system variables? Defaults to false.
- DynamicFilenameEnabled bool
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- DynamicPathEnabled bool
- Is the path using dynamic expression, function or system variables? Defaults to false.
- Filename string
- The filename of the file.
- Path string
- The folder path to the file. This can be an empty string.
- Container string
- The container on the Azure Blob Storage Account hosting the file.
- DynamicContainerEnabled bool
- Is the container using dynamic expression, function or system variables? Defaults to false.
- DynamicFilenameEnabled bool
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- DynamicPathEnabled bool
- Is the path using dynamic expression, function or system variables? Defaults to false.
- Filename string
- The filename of the file.
- Path string
- The folder path to the file. This can be an empty string.
- container String
- The container on the Azure Blob Storage Account hosting the file.
- dynamicContainerEnabled Boolean
- Is the container using dynamic expression, function or system variables? Defaults to false.
- dynamicFilenameEnabled Boolean
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- dynamicPathEnabled Boolean
- Is the path using dynamic expression, function or system variables? Defaults to false.
- filename String
- The filename of the file.
- path String
- The folder path to the file. This can be an empty string.
- container string
- The container on the Azure Blob Storage Account hosting the file.
- dynamicContainerEnabled boolean
- Is the container using dynamic expression, function or system variables? Defaults to false.
- dynamicFilenameEnabled boolean
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- dynamicPathEnabled boolean
- Is the path using dynamic expression, function or system variables? Defaults to false.
- filename string
- The filename of the file.
- path string
- The folder path to the file. This can be an empty string.
- container str
- The container on the Azure Blob Storage Account hosting the file.
- dynamic_container_enabled bool
- Is the container using dynamic expression, function or system variables? Defaults to false.
- dynamic_filename_enabled bool
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- dynamic_path_enabled bool
- Is the path using dynamic expression, function or system variables? Defaults to false.
- filename str
- The filename of the file.
- path str
- The folder path to the file. This can be an empty string.
- container String
- The container on the Azure Blob Storage Account hosting the file.
- dynamicContainerEnabled Boolean
- Is the container using dynamic expression, function or system variables? Defaults to false.
- dynamicFilenameEnabled Boolean
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- dynamicPathEnabled Boolean
- Is the path using dynamic expression, function or system variables? Defaults to false.
- filename String
- The filename of the file.
- path String
- The folder path to the file. This can be an empty string.
DatasetDelimitedTextHttpServerLocation, DatasetDelimitedTextHttpServerLocationArgs            
- Filename string
- The filename of the file on the web server.
- Path string
- The folder path to the file on the web server.
- RelativeUrl string
- The base URL to the web server hosting the file.
- DynamicFilenameEnabled bool
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- DynamicPathEnabled bool
- Is the path using dynamic expression, function or system variables? Defaults to false.
- Filename string
- The filename of the file on the web server.
- Path string
- The folder path to the file on the web server.
- RelativeUrl string
- The base URL to the web server hosting the file.
- DynamicFilenameEnabled bool
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- DynamicPathEnabled bool
- Is the path using dynamic expression, function or system variables? Defaults to false.
- filename String
- The filename of the file on the web server.
- path String
- The folder path to the file on the web server.
- relativeUrl String
- The base URL to the web server hosting the file.
- dynamicFilenameEnabled Boolean
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- dynamicPathEnabled Boolean
- Is the path using dynamic expression, function or system variables? Defaults to false.
- filename string
- The filename of the file on the web server.
- path string
- The folder path to the file on the web server.
- relativeUrl string
- The base URL to the web server hosting the file.
- dynamicFilenameEnabled boolean
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- dynamicPathEnabled boolean
- Is the path using dynamic expression, function or system variables? Defaults to false.
- filename str
- The filename of the file on the web server.
- path str
- The folder path to the file on the web server.
- relative_url str
- The base URL to the web server hosting the file.
- dynamic_filename_enabled bool
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- dynamic_path_enabled bool
- Is the path using dynamic expression, function or system variables? Defaults to false.
- filename String
- The filename of the file on the web server.
- path String
- The folder path to the file on the web server.
- relativeUrl String
- The base URL to the web server hosting the file.
- dynamicFilenameEnabled Boolean
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- dynamicPathEnabled Boolean
- Is the path using dynamic expression, function or system variables? Defaults to false.
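For an http_server_location, the dynamic_filename_enabled flag likewise marks the filename as a Data Factory expression rather than a literal. A hedged sketch with placeholder names and a hypothetical expression:
import pulumi_azure as azure

http_dataset = azure.datafactory.DatasetDelimitedText("sketch-http",
    name="sketch-http",
    # Placeholder ID of an existing Data Factory.
    data_factory_id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example",
    # Placeholder name of an existing Web linked service.
    linked_service_name="example-web-linked-service",
    http_server_location={
        "relative_url": "https://example.com/exports",
        "path": "daily",
        # Hypothetical expression resolved by Data Factory at run time.
        "filename": "@concat('export-', formatDateTime(utcnow(), 'yyyyMMdd'), '.csv')",
        "dynamic_filename_enabled": True,
    },
    column_delimiter=",",
    first_row_as_header=True)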
DatasetDelimitedTextSchemaColumn, DatasetDelimitedTextSchemaColumnArgs          
- Name string
- The name of the column.
- Description string
- The description of the column.
- Type string
- Type of the column. Valid values are Byte, Byte[], Boolean, Date, DateTime, DateTimeOffset, Decimal, Double, Guid, Int16, Int32, Int64, Single, String and TimeSpan. Please note these values are case sensitive.
- Name string
- The name of the column.
- Description string
- The description of the column.
- Type string
- Type of the column. Valid values are Byte, Byte[], Boolean, Date, DateTime, DateTimeOffset, Decimal, Double, Guid, Int16, Int32, Int64, Single, String and TimeSpan. Please note these values are case sensitive.
- name String
- The name of the column.
- description String
- The description of the column.
- type String
- Type of the column. Valid values are Byte, Byte[], Boolean, Date, DateTime, DateTimeOffset, Decimal, Double, Guid, Int16, Int32, Int64, Single, String and TimeSpan. Please note these values are case sensitive.
- name string
- The name of the column.
- description string
- The description of the column.
- type string
- Type of the column. Valid values are Byte, Byte[], Boolean, Date, DateTime, DateTimeOffset, Decimal, Double, Guid, Int16, Int32, Int64, Single, String and TimeSpan. Please note these values are case sensitive.
- name str
- The name of the column.
- description str
- The description of the column.
- type str
- Type of the column. Valid values are Byte, Byte[], Boolean, Date, DateTime, DateTimeOffset, Decimal, Double, Guid, Int16, Int32, Int64, Single, String and TimeSpan. Please note these values are case sensitive.
- name String
- The name of the column.
- description String
- The description of the column.
- type String
- Type of the column. Valid values are Byte, Byte[], Boolean, Date, DateTime, DateTimeOffset, Decimal, Double, Guid, Int16, Int32, Int64, Single, String and TimeSpan. Please note these values are case sensitive.
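For completeness, a hedged Python sketch of schema_column blocks attached to a dataset; the column names are invented, the identifiers are placeholders, and the types are drawn from the list above.
import pulumi_azure as azure

typed_dataset = azure.datafactory.DatasetDelimitedText("sketch-typed",
    name="sketch-typed",
    # Placeholder ID of an existing Data Factory.
    data_factory_id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example",
    # Placeholder name of an existing Azure Blob Storage linked service.
    linked_service_name="example-blob-linked-service",
    azure_blob_storage_location={
        "container": "raw",
        "path": "customers",
        "filename": "customers.csv",
    },
    column_delimiter=",",
    first_row_as_header=True,
    # Each entry corresponds to one schema_column block.
    schema_columns=[
        {"name": "customer_id", "type": "Int64", "description": "Primary key"},
        {"name": "customer_name", "type": "String"},
        {"name": "signup_date", "type": "DateTime"},
    ])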
Import
Data Factory Datasets can be imported using the resource id, e.g.
$ pulumi import azure:datafactory/datasetDelimitedText:DatasetDelimitedText example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/datasets/example
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- Azure Classic pulumi/pulumi-azure
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the azurerm Terraform Provider.