We recommend using Azure Native.
azure.datafactory.DatasetParquet
Manages an Azure Parquet Dataset inside an Azure Data Factory.
Example Usage
import * as pulumi from "@pulumi/pulumi";
import * as azure from "@pulumi/azure";
const example = new azure.core.ResourceGroup("example", {
    name: "example-resources",
    location: "West Europe",
});
const exampleFactory = new azure.datafactory.Factory("example", {
    name: "example",
    location: example.location,
    resourceGroupName: example.name,
});
const exampleLinkedServiceWeb = new azure.datafactory.LinkedServiceWeb("example", {
    name: "example",
    dataFactoryId: exampleFactory.id,
    authenticationType: "Anonymous",
    url: "https://www.bing.com",
});
const exampleDatasetParquet = new azure.datafactory.DatasetParquet("example", {
    name: "example",
    dataFactoryId: exampleFactory.id,
    linkedServiceName: exampleLinkedServiceWeb.name,
    httpServerLocation: {
        relativeUrl: "http://www.bing.com",
        path: "foo/bar/",
        filename: "fizz.txt",
    },
});
import pulumi
import pulumi_azure as azure
example = azure.core.ResourceGroup("example",
    name="example-resources",
    location="West Europe")
example_factory = azure.datafactory.Factory("example",
    name="example",
    location=example.location,
    resource_group_name=example.name)
example_linked_service_web = azure.datafactory.LinkedServiceWeb("example",
    name="example",
    data_factory_id=example_factory.id,
    authentication_type="Anonymous",
    url="https://www.bing.com")
example_dataset_parquet = azure.datafactory.DatasetParquet("example",
    name="example",
    data_factory_id=example_factory.id,
    linked_service_name=example_linked_service_web.name,
    http_server_location={
        "relative_url": "http://www.bing.com",
        "path": "foo/bar/",
        "filename": "fizz.txt",
    })
package main
import (
	"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/core"
	"github.com/pulumi/pulumi-azure/sdk/v6/go/azure/datafactory"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		example, err := core.NewResourceGroup(ctx, "example", &core.ResourceGroupArgs{
			Name:     pulumi.String("example-resources"),
			Location: pulumi.String("West Europe"),
		})
		if err != nil {
			return err
		}
		exampleFactory, err := datafactory.NewFactory(ctx, "example", &datafactory.FactoryArgs{
			Name:              pulumi.String("example"),
			Location:          example.Location,
			ResourceGroupName: example.Name,
		})
		if err != nil {
			return err
		}
		exampleLinkedServiceWeb, err := datafactory.NewLinkedServiceWeb(ctx, "example", &datafactory.LinkedServiceWebArgs{
			Name:               pulumi.String("example"),
			DataFactoryId:      exampleFactory.ID(),
			AuthenticationType: pulumi.String("Anonymous"),
			Url:                pulumi.String("https://www.bing.com"),
		})
		if err != nil {
			return err
		}
		_, err = datafactory.NewDatasetParquet(ctx, "example", &datafactory.DatasetParquetArgs{
			Name:              pulumi.String("example"),
			DataFactoryId:     exampleFactory.ID(),
			LinkedServiceName: exampleLinkedServiceWeb.Name,
			HttpServerLocation: &datafactory.DatasetParquetHttpServerLocationArgs{
				RelativeUrl: pulumi.String("http://www.bing.com"),
				Path:        pulumi.String("foo/bar/"),
				Filename:    pulumi.String("fizz.txt"),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Azure = Pulumi.Azure;
return await Deployment.RunAsync(() => 
{
    var example = new Azure.Core.ResourceGroup("example", new()
    {
        Name = "example-resources",
        Location = "West Europe",
    });
    var exampleFactory = new Azure.DataFactory.Factory("example", new()
    {
        Name = "example",
        Location = example.Location,
        ResourceGroupName = example.Name,
    });
    var exampleLinkedServiceWeb = new Azure.DataFactory.LinkedServiceWeb("example", new()
    {
        Name = "example",
        DataFactoryId = exampleFactory.Id,
        AuthenticationType = "Anonymous",
        Url = "https://www.bing.com",
    });
    var exampleDatasetParquet = new Azure.DataFactory.DatasetParquet("example", new()
    {
        Name = "example",
        DataFactoryId = exampleFactory.Id,
        LinkedServiceName = exampleLinkedServiceWeb.Name,
        HttpServerLocation = new Azure.DataFactory.Inputs.DatasetParquetHttpServerLocationArgs
        {
            RelativeUrl = "http://www.bing.com",
            Path = "foo/bar/",
            Filename = "fizz.txt",
        },
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.azure.core.ResourceGroup;
import com.pulumi.azure.core.ResourceGroupArgs;
import com.pulumi.azure.datafactory.Factory;
import com.pulumi.azure.datafactory.FactoryArgs;
import com.pulumi.azure.datafactory.LinkedServiceWeb;
import com.pulumi.azure.datafactory.LinkedServiceWebArgs;
import com.pulumi.azure.datafactory.DatasetParquet;
import com.pulumi.azure.datafactory.DatasetParquetArgs;
import com.pulumi.azure.datafactory.inputs.DatasetParquetHttpServerLocationArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var example = new ResourceGroup("example", ResourceGroupArgs.builder()
            .name("example-resources")
            .location("West Europe")
            .build());
        var exampleFactory = new Factory("exampleFactory", FactoryArgs.builder()
            .name("example")
            .location(example.location())
            .resourceGroupName(example.name())
            .build());
        var exampleLinkedServiceWeb = new LinkedServiceWeb("exampleLinkedServiceWeb", LinkedServiceWebArgs.builder()
            .name("example")
            .dataFactoryId(exampleFactory.id())
            .authenticationType("Anonymous")
            .url("https://www.bing.com")
            .build());
        var exampleDatasetParquet = new DatasetParquet("exampleDatasetParquet", DatasetParquetArgs.builder()
            .name("example")
            .dataFactoryId(exampleFactory.id())
            .linkedServiceName(exampleLinkedServiceWeb.name())
            .httpServerLocation(DatasetParquetHttpServerLocationArgs.builder()
                .relativeUrl("http://www.bing.com")
                .path("foo/bar/")
                .filename("fizz.txt")
                .build())
            .build());
    }
}
resources:
  example:
    type: azure:core:ResourceGroup
    properties:
      name: example-resources
      location: West Europe
  exampleFactory:
    type: azure:datafactory:Factory
    name: example
    properties:
      name: example
      location: ${example.location}
      resourceGroupName: ${example.name}
  exampleLinkedServiceWeb:
    type: azure:datafactory:LinkedServiceWeb
    name: example
    properties:
      name: example
      dataFactoryId: ${exampleFactory.id}
      authenticationType: Anonymous
      url: https://www.bing.com
  exampleDatasetParquet:
    type: azure:datafactory:DatasetParquet
    name: example
    properties:
      name: example
      dataFactoryId: ${exampleFactory.id}
      linkedServiceName: ${exampleLinkedServiceWeb.name}
      httpServerLocation:
        relativeUrl: http://www.bing.com
        path: foo/bar/
        filename: fizz.txt
Create DatasetParquet Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new DatasetParquet(name: string, args: DatasetParquetArgs, opts?: CustomResourceOptions);
@overload
def DatasetParquet(resource_name: str,
                   args: DatasetParquetArgs,
                   opts: Optional[ResourceOptions] = None)
@overload
def DatasetParquet(resource_name: str,
                   opts: Optional[ResourceOptions] = None,
                   data_factory_id: Optional[str] = None,
                   linked_service_name: Optional[str] = None,
                   azure_blob_storage_location: Optional[DatasetParquetAzureBlobStorageLocationArgs] = None,
                   additional_properties: Optional[Mapping[str, str]] = None,
                   compression_codec: Optional[str] = None,
                   compression_level: Optional[str] = None,
                   azure_blob_fs_location: Optional[DatasetParquetAzureBlobFsLocationArgs] = None,
                   description: Optional[str] = None,
                   folder: Optional[str] = None,
                   http_server_location: Optional[DatasetParquetHttpServerLocationArgs] = None,
                   annotations: Optional[Sequence[str]] = None,
                   name: Optional[str] = None,
                   parameters: Optional[Mapping[str, str]] = None,
                   schema_columns: Optional[Sequence[DatasetParquetSchemaColumnArgs]] = None)
func NewDatasetParquet(ctx *Context, name string, args DatasetParquetArgs, opts ...ResourceOption) (*DatasetParquet, error)
public DatasetParquet(string name, DatasetParquetArgs args, CustomResourceOptions? opts = null)
public DatasetParquet(String name, DatasetParquetArgs args)
public DatasetParquet(String name, DatasetParquetArgs args, CustomResourceOptions options)
type: azure:datafactory:DatasetParquet
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args DatasetParquetArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args DatasetParquetArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args DatasetParquetArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args DatasetParquetArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args DatasetParquetArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var datasetParquetResource = new Azure.DataFactory.DatasetParquet("datasetParquetResource", new()
{
    DataFactoryId = "string",
    LinkedServiceName = "string",
    AzureBlobStorageLocation = new Azure.DataFactory.Inputs.DatasetParquetAzureBlobStorageLocationArgs
    {
        Container = "string",
        DynamicContainerEnabled = false,
        DynamicFilenameEnabled = false,
        DynamicPathEnabled = false,
        Filename = "string",
        Path = "string",
    },
    AdditionalProperties = 
    {
        { "string", "string" },
    },
    CompressionCodec = "string",
    CompressionLevel = "string",
    AzureBlobFsLocation = new Azure.DataFactory.Inputs.DatasetParquetAzureBlobFsLocationArgs
    {
        DynamicFileSystemEnabled = false,
        DynamicFilenameEnabled = false,
        DynamicPathEnabled = false,
        FileSystem = "string",
        Filename = "string",
        Path = "string",
    },
    Description = "string",
    Folder = "string",
    HttpServerLocation = new Azure.DataFactory.Inputs.DatasetParquetHttpServerLocationArgs
    {
        Filename = "string",
        RelativeUrl = "string",
        DynamicFilenameEnabled = false,
        DynamicPathEnabled = false,
        Path = "string",
    },
    Annotations = new[]
    {
        "string",
    },
    Name = "string",
    Parameters = 
    {
        { "string", "string" },
    },
    SchemaColumns = new[]
    {
        new Azure.DataFactory.Inputs.DatasetParquetSchemaColumnArgs
        {
            Name = "string",
            Description = "string",
            Type = "string",
        },
    },
});
example, err := datafactory.NewDatasetParquet(ctx, "datasetParquetResource", &datafactory.DatasetParquetArgs{
	DataFactoryId:     pulumi.String("string"),
	LinkedServiceName: pulumi.String("string"),
	AzureBlobStorageLocation: &datafactory.DatasetParquetAzureBlobStorageLocationArgs{
		Container:               pulumi.String("string"),
		DynamicContainerEnabled: pulumi.Bool(false),
		DynamicFilenameEnabled:  pulumi.Bool(false),
		DynamicPathEnabled:      pulumi.Bool(false),
		Filename:                pulumi.String("string"),
		Path:                    pulumi.String("string"),
	},
	AdditionalProperties: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	CompressionCodec: pulumi.String("string"),
	CompressionLevel: pulumi.String("string"),
	AzureBlobFsLocation: &datafactory.DatasetParquetAzureBlobFsLocationArgs{
		DynamicFileSystemEnabled: pulumi.Bool(false),
		DynamicFilenameEnabled:   pulumi.Bool(false),
		DynamicPathEnabled:       pulumi.Bool(false),
		FileSystem:               pulumi.String("string"),
		Filename:                 pulumi.String("string"),
		Path:                     pulumi.String("string"),
	},
	Description: pulumi.String("string"),
	Folder:      pulumi.String("string"),
	HttpServerLocation: &datafactory.DatasetParquetHttpServerLocationArgs{
		Filename:               pulumi.String("string"),
		RelativeUrl:            pulumi.String("string"),
		DynamicFilenameEnabled: pulumi.Bool(false),
		DynamicPathEnabled:     pulumi.Bool(false),
		Path:                   pulumi.String("string"),
	},
	Annotations: pulumi.StringArray{
		pulumi.String("string"),
	},
	Name: pulumi.String("string"),
	Parameters: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
	SchemaColumns: datafactory.DatasetParquetSchemaColumnArray{
		&datafactory.DatasetParquetSchemaColumnArgs{
			Name:        pulumi.String("string"),
			Description: pulumi.String("string"),
			Type:        pulumi.String("string"),
		},
	},
})
var datasetParquetResource = new DatasetParquet("datasetParquetResource", DatasetParquetArgs.builder()
    .dataFactoryId("string")
    .linkedServiceName("string")
    .azureBlobStorageLocation(DatasetParquetAzureBlobStorageLocationArgs.builder()
        .container("string")
        .dynamicContainerEnabled(false)
        .dynamicFilenameEnabled(false)
        .dynamicPathEnabled(false)
        .filename("string")
        .path("string")
        .build())
    .additionalProperties(Map.of("string", "string"))
    .compressionCodec("string")
    .compressionLevel("string")
    .azureBlobFsLocation(DatasetParquetAzureBlobFsLocationArgs.builder()
        .dynamicFileSystemEnabled(false)
        .dynamicFilenameEnabled(false)
        .dynamicPathEnabled(false)
        .fileSystem("string")
        .filename("string")
        .path("string")
        .build())
    .description("string")
    .folder("string")
    .httpServerLocation(DatasetParquetHttpServerLocationArgs.builder()
        .filename("string")
        .relativeUrl("string")
        .dynamicFilenameEnabled(false)
        .dynamicPathEnabled(false)
        .path("string")
        .build())
    .annotations("string")
    .name("string")
    .parameters(Map.of("string", "string"))
    .schemaColumns(DatasetParquetSchemaColumnArgs.builder()
        .name("string")
        .description("string")
        .type("string")
        .build())
    .build());
dataset_parquet_resource = azure.datafactory.DatasetParquet("datasetParquetResource",
    data_factory_id="string",
    linked_service_name="string",
    azure_blob_storage_location={
        "container": "string",
        "dynamic_container_enabled": False,
        "dynamic_filename_enabled": False,
        "dynamic_path_enabled": False,
        "filename": "string",
        "path": "string",
    },
    additional_properties={
        "string": "string",
    },
    compression_codec="string",
    compression_level="string",
    azure_blob_fs_location={
        "dynamic_file_system_enabled": False,
        "dynamic_filename_enabled": False,
        "dynamic_path_enabled": False,
        "file_system": "string",
        "filename": "string",
        "path": "string",
    },
    description="string",
    folder="string",
    http_server_location={
        "filename": "string",
        "relative_url": "string",
        "dynamic_filename_enabled": False,
        "dynamic_path_enabled": False,
        "path": "string",
    },
    annotations=["string"],
    name="string",
    parameters={
        "string": "string",
    },
    schema_columns=[{
        "name": "string",
        "description": "string",
        "type": "string",
    }])
const datasetParquetResource = new azure.datafactory.DatasetParquet("datasetParquetResource", {
    dataFactoryId: "string",
    linkedServiceName: "string",
    azureBlobStorageLocation: {
        container: "string",
        dynamicContainerEnabled: false,
        dynamicFilenameEnabled: false,
        dynamicPathEnabled: false,
        filename: "string",
        path: "string",
    },
    additionalProperties: {
        string: "string",
    },
    compressionCodec: "string",
    compressionLevel: "string",
    azureBlobFsLocation: {
        dynamicFileSystemEnabled: false,
        dynamicFilenameEnabled: false,
        dynamicPathEnabled: false,
        fileSystem: "string",
        filename: "string",
        path: "string",
    },
    description: "string",
    folder: "string",
    httpServerLocation: {
        filename: "string",
        relativeUrl: "string",
        dynamicFilenameEnabled: false,
        dynamicPathEnabled: false,
        path: "string",
    },
    annotations: ["string"],
    name: "string",
    parameters: {
        string: "string",
    },
    schemaColumns: [{
        name: "string",
        description: "string",
        type: "string",
    }],
});
type: azure:datafactory:DatasetParquet
properties:
    additionalProperties:
        string: string
    annotations:
        - string
    azureBlobFsLocation:
        dynamicFileSystemEnabled: false
        dynamicFilenameEnabled: false
        dynamicPathEnabled: false
        fileSystem: string
        filename: string
        path: string
    azureBlobStorageLocation:
        container: string
        dynamicContainerEnabled: false
        dynamicFilenameEnabled: false
        dynamicPathEnabled: false
        filename: string
        path: string
    compressionCodec: string
    compressionLevel: string
    dataFactoryId: string
    description: string
    folder: string
    httpServerLocation:
        dynamicFilenameEnabled: false
        dynamicPathEnabled: false
        filename: string
        path: string
        relativeUrl: string
    linkedServiceName: string
    name: string
    parameters:
        string: string
    schemaColumns:
        - description: string
          name: string
          type: string
DatasetParquet Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
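For instance, the following two declarations are equivalent (a minimal sketch reusing the factory and linked service from the Python example above):

# Nested inputs passed as a typed args class...
dataset_a = azure.datafactory.DatasetParquet("dataset-a",
    data_factory_id=example_factory.id,
    linked_service_name=example_linked_service_web.name,
    http_server_location=azure.datafactory.DatasetParquetHttpServerLocationArgs(
        relative_url="http://www.bing.com",
        path="foo/bar/",
        filename="fizz.txt",
    ))

# ...or as a plain dictionary literal with snake_case keys.
dataset_b = azure.datafactory.DatasetParquet("dataset-b",
    data_factory_id=example_factory.id,
    linked_service_name=example_linked_service_web.name,
    http_server_location={
        "relative_url": "http://www.bing.com",
        "path": "foo/bar/",
        "filename": "fizz.txt",
    })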
The DatasetParquet resource accepts the following input properties:
- DataFactoryId string
- The ID of the Data Factory in which to associate the Dataset. Changing this forces a new resource to be created.
- LinkedServiceName string
- The name of the Data Factory Linked Service with which to associate the Dataset.
- AdditionalProperties Dictionary<string, string>
- A map of additional properties to associate with the Data Factory Dataset. The following locations are supported for a Parquet Dataset:
- Annotations List<string>
- List of tags that can be used for describing the Data Factory Dataset.
- AzureBlobFsLocation DatasetParquetAzureBlobFsLocation
- An azure_blob_fs_location block as defined below.
- AzureBlobStorageLocation DatasetParquetAzureBlobStorageLocation
- An azure_blob_storage_location block as defined below. The following arguments are specific to a Parquet Dataset:
- CompressionCodec string
- The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.
- CompressionLevel string
- Specifies the compression level. Possible values are Optimal and Fastest.
- Description string
- The description for the Data Factory Dataset.
- Folder string
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- HttpServerLocation DatasetParquetHttpServerLocation
- An http_server_location block as defined below.
- Name string
- Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- Parameters Dictionary<string, string>
- A map of parameters to associate with the Data Factory Dataset.
- SchemaColumns List<DatasetParquetSchemaColumn>
- A schema_column block as defined below.
- DataFactoryId string
- The ID of the Data Factory in which to associate the Dataset. Changing this forces a new resource to be created.
- LinkedServiceName string
- The name of the Data Factory Linked Service with which to associate the Dataset.
- AdditionalProperties map[string]string
- A map of additional properties to associate with the Data Factory Dataset. The following locations are supported for a Parquet Dataset:
- Annotations []string
- List of tags that can be used for describing the Data Factory Dataset.
- AzureBlobFsLocation DatasetParquetAzureBlobFsLocationArgs
- An azure_blob_fs_location block as defined below.
- AzureBlobStorageLocation DatasetParquetAzureBlobStorageLocationArgs
- An azure_blob_storage_location block as defined below. The following arguments are specific to a Parquet Dataset:
- CompressionCodec string
- The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.
- CompressionLevel string
- Specifies the compression level. Possible values are Optimal and Fastest.
- Description string
- The description for the Data Factory Dataset.
- Folder string
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- HttpServerLocation DatasetParquetHttpServerLocationArgs
- An http_server_location block as defined below.
- Name string
- Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- Parameters map[string]string
- A map of parameters to associate with the Data Factory Dataset.
- SchemaColumns []DatasetParquetSchemaColumnArgs
- A schema_column block as defined below.
- dataFactoryId String
- The ID of the Data Factory in which to associate the Dataset. Changing this forces a new resource to be created.
- linkedServiceName String
- The name of the Data Factory Linked Service with which to associate the Dataset.
- additionalProperties Map<String,String>
- A map of additional properties to associate with the Data Factory Dataset. The following locations are supported for a Parquet Dataset:
- annotations List<String>
- List of tags that can be used for describing the Data Factory Dataset.
- azureBlobFsLocation DatasetParquetAzureBlobFsLocation
- An azure_blob_fs_location block as defined below.
- azureBlobStorageLocation DatasetParquetAzureBlobStorageLocation
- An azure_blob_storage_location block as defined below. The following arguments are specific to a Parquet Dataset:
- compressionCodec String
- The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.
- compressionLevel String
- Specifies the compression level. Possible values are Optimal and Fastest.
- description String
- The description for the Data Factory Dataset.
- folder String
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- httpServerLocation DatasetParquetHttpServerLocation
- An http_server_location block as defined below.
- name String
- Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- parameters Map<String,String>
- A map of parameters to associate with the Data Factory Dataset.
- schemaColumns List<DatasetParquetSchemaColumn>
- A schema_column block as defined below.
- dataFactoryId string
- The ID of the Data Factory in which to associate the Dataset. Changing this forces a new resource to be created.
- linkedServiceName string
- The name of the Data Factory Linked Service with which to associate the Dataset.
- additionalProperties {[key: string]: string}
- A map of additional properties to associate with the Data Factory Dataset. The following locations are supported for a Parquet Dataset:
- annotations string[]
- List of tags that can be used for describing the Data Factory Dataset.
- azureBlobFsLocation DatasetParquetAzureBlobFsLocation
- An azure_blob_fs_location block as defined below.
- azureBlobStorageLocation DatasetParquetAzureBlobStorageLocation
- An azure_blob_storage_location block as defined below. The following arguments are specific to a Parquet Dataset:
- compressionCodec string
- The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.
- compressionLevel string
- Specifies the compression level. Possible values are Optimal and Fastest.
- description string
- The description for the Data Factory Dataset.
- folder string
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- httpServerLocation DatasetParquetHttpServerLocation
- An http_server_location block as defined below.
- name string
- Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- parameters {[key: string]: string}
- A map of parameters to associate with the Data Factory Dataset.
- schemaColumns DatasetParquetSchemaColumn[]
- A schema_column block as defined below.
- data_factory_id str
- The ID of the Data Factory in which to associate the Dataset. Changing this forces a new resource to be created.
- linked_service_name str
- The name of the Data Factory Linked Service with which to associate the Dataset.
- additional_properties Mapping[str, str]
- A map of additional properties to associate with the Data Factory Dataset. The following locations are supported for a Parquet Dataset:
- annotations Sequence[str]
- List of tags that can be used for describing the Data Factory Dataset.
- azure_blob_fs_location DatasetParquetAzureBlobFsLocationArgs
- An azure_blob_fs_location block as defined below.
- azure_blob_storage_location DatasetParquetAzureBlobStorageLocationArgs
- An azure_blob_storage_location block as defined below. The following arguments are specific to a Parquet Dataset:
- compression_codec str
- The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.
- compression_level str
- Specifies the compression level. Possible values are Optimal and Fastest.
- description str
- The description for the Data Factory Dataset.
- folder str
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- http_server_location DatasetParquetHttpServerLocationArgs
- An http_server_location block as defined below.
- name str
- Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- parameters Mapping[str, str]
- A map of parameters to associate with the Data Factory Dataset.
- schema_columns Sequence[DatasetParquetSchemaColumnArgs]
- A schema_column block as defined below.
- dataFactoryId String
- The ID of the Data Factory in which to associate the Dataset. Changing this forces a new resource to be created.
- linkedServiceName String
- The name of the Data Factory Linked Service with which to associate the Dataset.
- additionalProperties Map<String>
- A map of additional properties to associate with the Data Factory Dataset. The following locations are supported for a Parquet Dataset:
- annotations List<String>
- List of tags that can be used for describing the Data Factory Dataset.
- azureBlobFsLocation Property Map
- An azure_blob_fs_location block as defined below.
- azureBlobStorageLocation Property Map
- An azure_blob_storage_location block as defined below. The following arguments are specific to a Parquet Dataset:
- compressionCodec String
- The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.
- compressionLevel String
- Specifies the compression level. Possible values are Optimal and Fastest.
- description String
- The description for the Data Factory Dataset.
- folder String
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- httpServerLocation Property Map
- An http_server_location block as defined below.
- name String
- Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- parameters Map<String>
- A map of parameters to associate with the Data Factory Dataset.
- schemaColumns List<Property Map>
- A schema_column block as defined below.
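As a rough illustration of how several of these inputs combine, the following Python sketch places a snappy-compressed dataset in a folder and attaches annotations and parameters; the Blob Storage linked service name is an assumed placeholder:

curated = azure.datafactory.DatasetParquet("curated",
    data_factory_id=example_factory.id,
    linked_service_name="example-blob-linked-service",  # assumed existing Blob Storage linked service
    folder="curated/parquet",                           # shown as a folder in the Data Factory UI
    compression_codec="snappy",
    annotations=["curated", "parquet"],
    parameters={"environment": "dev"},
    azure_blob_storage_location={
        "container": "data",
        "path": "curated/2024/",
        "filename": "events.parquet",
    })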
Outputs
All input properties are implicitly available as output properties. Additionally, the DatasetParquet resource produces the following output properties:
- Id string
- The provider-assigned unique ID for this managed resource.
- Id string
- The provider-assigned unique ID for this managed resource.
- id String
- The provider-assigned unique ID for this managed resource.
- id string
- The provider-assigned unique ID for this managed resource.
- id str
- The provider-assigned unique ID for this managed resource.
- id String
- The provider-assigned unique ID for this managed resource.
Look up Existing DatasetParquet Resource
Get an existing DatasetParquet resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
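For example, a minimal Python sketch of adopting an existing dataset by its Azure resource ID (the ID below is a placeholder, and pulumi is imported as in the examples above); the full lookup signatures follow:

existing = azure.datafactory.DatasetParquet.get("existing",
    "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/datasets/example")
pulumi.export("existingDatasetName", existing.name)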
public static get(name: string, id: Input<ID>, state?: DatasetParquetState, opts?: CustomResourceOptions): DatasetParquet
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        additional_properties: Optional[Mapping[str, str]] = None,
        annotations: Optional[Sequence[str]] = None,
        azure_blob_fs_location: Optional[DatasetParquetAzureBlobFsLocationArgs] = None,
        azure_blob_storage_location: Optional[DatasetParquetAzureBlobStorageLocationArgs] = None,
        compression_codec: Optional[str] = None,
        compression_level: Optional[str] = None,
        data_factory_id: Optional[str] = None,
        description: Optional[str] = None,
        folder: Optional[str] = None,
        http_server_location: Optional[DatasetParquetHttpServerLocationArgs] = None,
        linked_service_name: Optional[str] = None,
        name: Optional[str] = None,
        parameters: Optional[Mapping[str, str]] = None,
        schema_columns: Optional[Sequence[DatasetParquetSchemaColumnArgs]] = None) -> DatasetParquet
func GetDatasetParquet(ctx *Context, name string, id IDInput, state *DatasetParquetState, opts ...ResourceOption) (*DatasetParquet, error)
public static DatasetParquet Get(string name, Input<string> id, DatasetParquetState? state, CustomResourceOptions? opts = null)
public static DatasetParquet get(String name, Output<String> id, DatasetParquetState state, CustomResourceOptions options)
resources:
  _:
    type: azure:datafactory:DatasetParquet
    get:
      id: ${id}
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- AdditionalProperties Dictionary<string, string>
- A map of additional properties to associate with the Data Factory Dataset. The following locations are supported for a Parquet Dataset:
- Annotations List<string>
- List of tags that can be used for describing the Data Factory Dataset.
- AzureBlobFsLocation DatasetParquetAzureBlobFsLocation
- An azure_blob_fs_location block as defined below.
- AzureBlobStorageLocation DatasetParquetAzureBlobStorageLocation
- An azure_blob_storage_location block as defined below. The following arguments are specific to a Parquet Dataset:
- CompressionCodec string
- The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.
- CompressionLevel string
- Specifies the compression level. Possible values are Optimal and Fastest.
- DataFactoryId string
- The ID of the Data Factory in which to associate the Dataset. Changing this forces a new resource to be created.
- Description string
- The description for the Data Factory Dataset.
- Folder string
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- HttpServerLocation DatasetParquetHttpServerLocation
- An http_server_location block as defined below.
- LinkedServiceName string
- The name of the Data Factory Linked Service with which to associate the Dataset.
- Name string
- Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- Parameters Dictionary<string, string>
- A map of parameters to associate with the Data Factory Dataset.
- SchemaColumns List<DatasetParquetSchemaColumn>
- A schema_column block as defined below.
- AdditionalProperties map[string]string
- A map of additional properties to associate with the Data Factory Dataset. The following locations are supported for a Parquet Dataset:
- Annotations []string
- List of tags that can be used for describing the Data Factory Dataset.
- AzureBlobFsLocation DatasetParquetAzureBlobFsLocationArgs
- An azure_blob_fs_location block as defined below.
- AzureBlobStorageLocation DatasetParquetAzureBlobStorageLocationArgs
- An azure_blob_storage_location block as defined below. The following arguments are specific to a Parquet Dataset:
- CompressionCodec string
- The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.
- CompressionLevel string
- Specifies the compression level. Possible values are Optimal and Fastest.
- DataFactoryId string
- The ID of the Data Factory in which to associate the Dataset. Changing this forces a new resource to be created.
- Description string
- The description for the Data Factory Dataset.
- Folder string
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- HttpServerLocation DatasetParquetHttpServerLocationArgs
- An http_server_location block as defined below.
- LinkedServiceName string
- The name of the Data Factory Linked Service with which to associate the Dataset.
- Name string
- Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- Parameters map[string]string
- A map of parameters to associate with the Data Factory Dataset.
- SchemaColumns []DatasetParquetSchemaColumnArgs
- A schema_column block as defined below.
- additionalProperties Map<String,String>
- A map of additional properties to associate with the Data Factory Dataset. The following locations are supported for a Parquet Dataset:
- annotations List<String>
- List of tags that can be used for describing the Data Factory Dataset.
- azureBlobFsLocation DatasetParquetAzureBlobFsLocation
- An azure_blob_fs_location block as defined below.
- azureBlobStorageLocation DatasetParquetAzureBlobStorageLocation
- An azure_blob_storage_location block as defined below. The following arguments are specific to a Parquet Dataset:
- compressionCodec String
- The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.
- compressionLevel String
- Specifies the compression level. Possible values are Optimal and Fastest.
- dataFactoryId String
- The ID of the Data Factory in which to associate the Dataset. Changing this forces a new resource to be created.
- description String
- The description for the Data Factory Dataset.
- folder String
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- httpServerLocation DatasetParquetHttpServerLocation
- An http_server_location block as defined below.
- linkedServiceName String
- The name of the Data Factory Linked Service with which to associate the Dataset.
- name String
- Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- parameters Map<String,String>
- A map of parameters to associate with the Data Factory Dataset.
- schemaColumns List<DatasetParquetSchemaColumn>
- A schema_column block as defined below.
- additionalProperties {[key: string]: string}
- A map of additional properties to associate with the Data Factory Dataset. The following locations are supported for a Parquet Dataset:
- annotations string[]
- List of tags that can be used for describing the Data Factory Dataset.
- azureBlobFsLocation DatasetParquetAzureBlobFsLocation
- An azure_blob_fs_location block as defined below.
- azureBlobStorageLocation DatasetParquetAzureBlobStorageLocation
- An azure_blob_storage_location block as defined below. The following arguments are specific to a Parquet Dataset:
- compressionCodec string
- The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.
- compressionLevel string
- Specifies the compression level. Possible values are Optimal and Fastest.
- dataFactoryId string
- The ID of the Data Factory in which to associate the Dataset. Changing this forces a new resource to be created.
- description string
- The description for the Data Factory Dataset.
- folder string
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- httpServerLocation DatasetParquetHttpServerLocation
- An http_server_location block as defined below.
- linkedServiceName string
- The name of the Data Factory Linked Service with which to associate the Dataset.
- name string
- Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- parameters {[key: string]: string}
- A map of parameters to associate with the Data Factory Dataset.
- schemaColumns DatasetParquetSchemaColumn[]
- A schema_column block as defined below.
- additional_properties Mapping[str, str]
- A map of additional properties to associate with the Data Factory Dataset. The following locations are supported for a Parquet Dataset:
- annotations Sequence[str]
- List of tags that can be used for describing the Data Factory Dataset.
- azure_blob_fs_location DatasetParquetAzureBlobFsLocationArgs
- An azure_blob_fs_location block as defined below.
- azure_blob_storage_location DatasetParquetAzureBlobStorageLocationArgs
- An azure_blob_storage_location block as defined below. The following arguments are specific to a Parquet Dataset:
- compression_codec str
- The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.
- compression_level str
- Specifies the compression level. Possible values are Optimal and Fastest.
- data_factory_id str
- The ID of the Data Factory in which to associate the Dataset. Changing this forces a new resource to be created.
- description str
- The description for the Data Factory Dataset.
- folder str
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- http_server_location DatasetParquetHttpServerLocationArgs
- An http_server_location block as defined below.
- linked_service_name str
- The name of the Data Factory Linked Service with which to associate the Dataset.
- name str
- Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- parameters Mapping[str, str]
- A map of parameters to associate with the Data Factory Dataset.
- schema_columns Sequence[DatasetParquetSchemaColumnArgs]
- A schema_column block as defined below.
- additionalProperties Map<String>
- A map of additional properties to associate with the Data Factory Dataset. The following locations are supported for a Parquet Dataset:
- annotations List<String>
- List of tags that can be used for describing the Data Factory Dataset.
- azureBlobFsLocation Property Map
- An azure_blob_fs_location block as defined below.
- azureBlobStorageLocation Property Map
- An azure_blob_storage_location block as defined below. The following arguments are specific to a Parquet Dataset:
- compressionCodec String
- The compression codec used to read/write text files. Valid values are bzip2, gzip, deflate, ZipDeflate, TarGzip, Tar, snappy, or lz4. Please note these values are case-sensitive.
- compressionLevel String
- Specifies the compression level. Possible values are Optimal and Fastest.
- dataFactoryId String
- The ID of the Data Factory in which to associate the Dataset. Changing this forces a new resource to be created.
- description String
- The description for the Data Factory Dataset.
- folder String
- The folder that this Dataset is in. If not specified, the Dataset will appear at the root level.
- httpServerLocation Property Map
- An http_server_location block as defined below.
- linkedServiceName String
- The name of the Data Factory Linked Service with which to associate the Dataset.
- name String
- Specifies the name of the Data Factory Dataset. Changing this forces a new resource to be created. Must be globally unique. See the Microsoft documentation for all restrictions.
- parameters Map<String>
- A map of parameters to associate with the Data Factory Dataset.
- schemaColumns List<Property Map>
- A schema_column block as defined below.
Supporting Types
DatasetParquetAzureBlobFsLocation, DatasetParquetAzureBlobFsLocationArgs            
- DynamicFileSystemEnabled bool
- Is the file_system using dynamic expression, function or system variables? Defaults to false.
- DynamicFilenameEnabled bool
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- DynamicPathEnabled bool
- Is the path using dynamic expression, function or system variables? Defaults to false.
- FileSystem string
- The container on the Azure Data Lake Storage Account hosting the file.
- Filename string
- The filename of the file on the Azure Data Lake Storage Account.
- Path string
- The folder path to the file on the Azure Data Lake Storage Account.
- DynamicFileSystemEnabled bool
- Is the file_system using dynamic expression, function or system variables? Defaults to false.
- DynamicFilenameEnabled bool
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- DynamicPathEnabled bool
- Is the path using dynamic expression, function or system variables? Defaults to false.
- FileSystem string
- The container on the Azure Data Lake Storage Account hosting the file.
- Filename string
- The filename of the file on the Azure Data Lake Storage Account.
- Path string
- The folder path to the file on the Azure Data Lake Storage Account.
- dynamicFileSystemEnabled Boolean
- Is the file_system using dynamic expression, function or system variables? Defaults to false.
- dynamicFilenameEnabled Boolean
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- dynamicPathEnabled Boolean
- Is the path using dynamic expression, function or system variables? Defaults to false.
- fileSystem String
- The container on the Azure Data Lake Storage Account hosting the file.
- filename String
- The filename of the file on the Azure Data Lake Storage Account.
- path String
- The folder path to the file on the Azure Data Lake Storage Account.
- dynamicFileSystemEnabled boolean
- Is the file_system using dynamic expression, function or system variables? Defaults to false.
- dynamicFilenameEnabled boolean
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- dynamicPathEnabled boolean
- Is the path using dynamic expression, function or system variables? Defaults to false.
- fileSystem string
- The container on the Azure Data Lake Storage Account hosting the file.
- filename string
- The filename of the file on the Azure Data Lake Storage Account.
- path string
- The folder path to the file on the Azure Data Lake Storage Account.
- dynamic_file_system_enabled bool
- Is the file_system using dynamic expression, function or system variables? Defaults to false.
- dynamic_filename_enabled bool
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- dynamic_path_enabled bool
- Is the path using dynamic expression, function or system variables? Defaults to false.
- file_system str
- The container on the Azure Data Lake Storage Account hosting the file.
- filename str
- The filename of the file on the Azure Data Lake Storage Account.
- path str
- The folder path to the file on the Azure Data Lake Storage Account.
- dynamicFileSystemEnabled Boolean
- Is the file_system using dynamic expression, function or system variables? Defaults to false.
- dynamicFilenameEnabled Boolean
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- dynamicPathEnabled Boolean
- Is the path using dynamic expression, function or system variables? Defaults to false.
- fileSystem String
- The container on the Azure Data Lake Storage Account hosting the file.
- filename String
- The filename of the file on the Azure Data Lake Storage Account.
- path String
- The folder path to the file on the Azure Data Lake Storage Account.
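A hedged Python sketch of pointing the dataset at Azure Data Lake Storage Gen2 via azure_blob_fs_location instead of an HTTP server; the linked service name is an assumed placeholder:

lake_dataset = azure.datafactory.DatasetParquet("lake",
    data_factory_id=example_factory.id,
    linked_service_name="example-adls-linked-service",  # assumed existing Data Lake Storage Gen2 linked service
    azure_blob_fs_location={
        "file_system": "raw",           # the ADLS Gen2 file system (container)
        "path": "events/parquet/",
        "filename": "events.parquet",
    })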
DatasetParquetAzureBlobStorageLocation, DatasetParquetAzureBlobStorageLocationArgs            
- Container string
- The container on the Azure Blob Storage Account hosting the file.
- DynamicContainerEnabled bool
- Is the container using dynamic expression, function or system variables? Defaults to false.
- DynamicFilenameEnabled bool
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- DynamicPathEnabled bool
- Is the path using dynamic expression, function or system variables? Defaults to false.
- Filename string
- The filename of the file on the Azure Blob Storage Account.
- Path string
- The folder path to the file on the Azure Blob Storage Account.
- Container string
- The container on the Azure Blob Storage Account hosting the file.
- DynamicContainerEnabled bool
- Is the container using dynamic expression, function or system variables? Defaults to false.
- DynamicFilenameEnabled bool
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- DynamicPathEnabled bool
- Is the path using dynamic expression, function or system variables? Defaults to false.
- Filename string
- The filename of the file on the Azure Blob Storage Account.
- Path string
- The folder path to the file on the Azure Blob Storage Account.
- container String
- The container on the Azure Blob Storage Account hosting the file.
- dynamicContainerEnabled Boolean
- Is the container using dynamic expression, function or system variables? Defaults to false.
- dynamicFilenameEnabled Boolean
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- dynamicPathEnabled Boolean
- Is the path using dynamic expression, function or system variables? Defaults to false.
- filename String
- The filename of the file on the Azure Blob Storage Account.
- path String
- The folder path to the file on the Azure Blob Storage Account.
- container string
- The container on the Azure Blob Storage Account hosting the file.
- dynamicContainerEnabled boolean
- Is the container using dynamic expression, function or system variables? Defaults to false.
- dynamicFilenameEnabled boolean
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- dynamicPathEnabled boolean
- Is the path using dynamic expression, function or system variables? Defaults to false.
- filename string
- The filename of the file on the Azure Blob Storage Account.
- path string
- The folder path to the file on the Azure Blob Storage Account.
- container str
- The container on the Azure Blob Storage Account hosting the file.
- dynamic_container_enabled bool
- Is the container using dynamic expression, function or system variables? Defaults to false.
- dynamic_filename_enabled bool
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- dynamic_path_enabled bool
- Is the path using dynamic expression, function or system variables? Defaults to false.
- filename str
- The filename of the file on the Azure Blob Storage Account.
- path str
- The folder path to the file on the Azure Blob Storage Account.
- container String
- The container on the Azure Blob Storage Account hosting the file.
- dynamicContainerEnabled Boolean
- Is the container using dynamic expression, function or system variables? Defaults to false.
- dynamicFilenameEnabled Boolean
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- dynamicPathEnabled Boolean
- Is the path using dynamic expression, function or system variables? Defaults to false.
- filename String
- The filename of the file on the Azure Blob Storage Account.
- path String
- The folder path to the file on the Azure Blob Storage Account.
DatasetParquetHttpServerLocation, DatasetParquetHttpServerLocationArgs          
- Filename string
- The filename of the file on the web server.
- RelativeUrl string
- The base URL to the web server hosting the file.
- DynamicFilenameEnabled bool
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- DynamicPathEnabled bool
- Is the path using dynamic expression, function or system variables? Defaults to false.
- Path string
- The folder path to the file on the web server.
- Filename string
- The filename of the file on the web server.
- RelativeUrl string
- The base URL to the web server hosting the file.
- DynamicFilenameEnabled bool
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- DynamicPathEnabled bool
- Is the path using dynamic expression, function or system variables? Defaults to false.
- Path string
- The folder path to the file on the web server.
- filename String
- The filename of the file on the web server.
- relativeUrl String
- The base URL to the web server hosting the file.
- dynamicFilenameEnabled Boolean
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- dynamicPathEnabled Boolean
- Is the path using dynamic expression, function or system variables? Defaults to false.
- path String
- The folder path to the file on the web server.
- filename string
- The filename of the file on the web server.
- relativeUrl string
- The base URL to the web server hosting the file.
- dynamicFilenameEnabled boolean
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- dynamicPathEnabled boolean
- Is the path using dynamic expression, function or system variables? Defaults to false.
- path string
- The folder path to the file on the web server.
- filename str
- The filename of the file on the web server.
- relative_url str
- The base URL to the web server hosting the file.
- dynamic_filename_enabled bool
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- dynamic_path_enabled bool
- Is the path using dynamic expression, function or system variables? Defaults to false.
- path str
- The folder path to the file on the web server.
- filename String
- The filename of the file on the web server.
- relativeUrl String
- The base URL to the web server hosting the file.
- dynamicFilenameEnabled Boolean
- Is the filename using dynamic expression, function or system variables? Defaults to false.
- dynamicPathEnabled Boolean
- Is the path using dynamic expression, function or system variables? Defaults to false.
- path String
- The folder path to the file on the web server.
DatasetParquetSchemaColumn, DatasetParquetSchemaColumnArgs        
- Name string
- The name of the column.
- Description string
- The description of the column.
- Type string
- Type of the column. Valid values are Byte, Byte[], Boolean, Date, DateTime, DateTimeOffset, Decimal, Double, Guid, Int16, Int32, Int64, Single, String, and TimeSpan. Please note these values are case-sensitive.
- Name string
- The name of the column.
- Description string
- The description of the column.
- Type string
- Type of the column. Valid values are Byte, Byte[], Boolean, Date, DateTime, DateTimeOffset, Decimal, Double, Guid, Int16, Int32, Int64, Single, String, and TimeSpan. Please note these values are case-sensitive.
- name String
- The name of the column.
- description String
- The description of the column.
- type String
- Type of the column. Valid values are Byte, Byte[], Boolean, Date, DateTime, DateTimeOffset, Decimal, Double, Guid, Int16, Int32, Int64, Single, String, and TimeSpan. Please note these values are case-sensitive.
- name string
- The name of the column.
- description string
- The description of the column.
- type string
- Type of the column. Valid values are Byte, Byte[], Boolean, Date, DateTime, DateTimeOffset, Decimal, Double, Guid, Int16, Int32, Int64, Single, String, and TimeSpan. Please note these values are case-sensitive.
- name str
- The name of the column.
- description str
- The description of the column.
- type str
- Type of the column. Valid values are Byte, Byte[], Boolean, Date, DateTime, DateTimeOffset, Decimal, Double, Guid, Int16, Int32, Int64, Single, String, and TimeSpan. Please note these values are case-sensitive.
- name String
- The name of the column.
- description String
- The description of the column.
- type String
- Type of the column. Valid values are Byte, Byte[], Boolean, Date, DateTime, DateTimeOffset, Decimal, Double, Guid, Int16, Int32, Int64, Single, String, and TimeSpan. Please note these values are case-sensitive.
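A short Python sketch of declaring an explicit schema; the column names and types below are illustrative (types must come from the list above):

typed_dataset = azure.datafactory.DatasetParquet("typed",
    data_factory_id=example_factory.id,
    linked_service_name=example_linked_service_web.name,
    http_server_location={
        "relative_url": "http://www.bing.com",
        "path": "foo/bar/",
        "filename": "fizz.parquet",
    },
    schema_columns=[
        {"name": "id", "type": "Int64", "description": "Row identifier"},
        {"name": "created_at", "type": "DateTime"},
    ])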
Import
Data Factory Datasets can be imported using the resource id, e.g.
$ pulumi import azure:datafactory/datasetParquet:DatasetParquet example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/example/providers/Microsoft.DataFactory/factories/example/datasets/example
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- Azure Classic pulumi/pulumi-azure
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the azurerm Terraform Provider.