aws.datasync.LocationHdfs
Manages an HDFS Location within AWS DataSync.
NOTE: The DataSync Agents must be available before creating this resource.
Example Usage
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
const example = new aws.datasync.LocationHdfs("example", {
    agentArns: [exampleAwsDatasyncAgent.arn],
    authenticationType: "SIMPLE",
    simpleUser: "example",
    nameNodes: [{
        hostname: exampleAwsInstance.privateDns,
        port: 80,
    }],
});
import pulumi
import pulumi_aws as aws
example = aws.datasync.LocationHdfs("example",
    agent_arns=[example_aws_datasync_agent["arn"]],
    authentication_type="SIMPLE",
    simple_user="example",
    name_nodes=[{
        "hostname": example_aws_instance["privateDns"],
        "port": 80,
    }])
package main
import (
	"github.com/pulumi/pulumi-aws/sdk/v7/go/aws/datasync"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := datasync.NewLocationHdfs(ctx, "example", &datasync.LocationHdfsArgs{
			AgentArns: pulumi.StringArray{
				exampleAwsDatasyncAgent.Arn,
			},
			AuthenticationType: pulumi.String("SIMPLE"),
			SimpleUser:         pulumi.String("example"),
			NameNodes: datasync.LocationHdfsNameNodeArray{
				&datasync.LocationHdfsNameNodeArgs{
					Hostname: pulumi.Any(exampleAwsInstance.PrivateDns),
					Port:     pulumi.Int(80),
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;
return await Deployment.RunAsync(() => 
{
    var example = new Aws.DataSync.LocationHdfs("example", new()
    {
        AgentArns = new[]
        {
            exampleAwsDatasyncAgent.Arn,
        },
        AuthenticationType = "SIMPLE",
        SimpleUser = "example",
        NameNodes = new[]
        {
            new Aws.DataSync.Inputs.LocationHdfsNameNodeArgs
            {
                Hostname = exampleAwsInstance.PrivateDns,
                Port = 80,
            },
        },
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.datasync.LocationHdfs;
import com.pulumi.aws.datasync.LocationHdfsArgs;
import com.pulumi.aws.datasync.inputs.LocationHdfsNameNodeArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var example = new LocationHdfs("example", LocationHdfsArgs.builder()
            .agentArns(exampleAwsDatasyncAgent.arn())
            .authenticationType("SIMPLE")
            .simpleUser("example")
            .nameNodes(LocationHdfsNameNodeArgs.builder()
                .hostname(exampleAwsInstance.privateDns())
                .port(80)
                .build())
            .build());
    }
}
resources:
  example:
    type: aws:datasync:LocationHdfs
    properties:
      agentArns:
        - ${exampleAwsDatasyncAgent.arn}
      authenticationType: SIMPLE
      simpleUser: example
      nameNodes:
        - hostname: ${exampleAwsInstance.privateDns}
          port: 80
Kerberos Authentication
import * as pulumi from "@pulumi/pulumi";
import * as aws from "@pulumi/aws";
import * as std from "@pulumi/std";
const example = new aws.datasync.LocationHdfs("example", {
    agentArns: [exampleAwsDatasyncAgent.arn],
    authenticationType: "KERBEROS",
    nameNodes: [{
        hostname: exampleAwsInstance.privateDns,
        port: 80,
    }],
    kerberosPrincipal: "user@example.com",
    kerberosKeytabBase64: std.filebase64({
        input: "user.keytab",
    }).then(invoke => invoke.result),
    kerberosKrb5Conf: std.file({
        input: "krb5.conf",
    }).then(invoke => invoke.result),
});
import pulumi
import pulumi_aws as aws
import pulumi_std as std
example = aws.datasync.LocationHdfs("example",
    agent_arns=[example_aws_datasync_agent["arn"]],
    authentication_type="KERBEROS",
    name_nodes=[{
        "hostname": example_aws_instance["privateDns"],
        "port": 80,
    }],
    kerberos_principal="user@example.com",
    kerberos_keytab_base64=std.filebase64(input="user.keytab").result,
    kerberos_krb5_conf=std.file(input="krb5.conf").result)
package main
import (
	"github.com/pulumi/pulumi-aws/sdk/v7/go/aws/datasync"
	"github.com/pulumi/pulumi-std/sdk/go/std"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		invokeFilebase64, err := std.Filebase64(ctx, &std.Filebase64Args{
			Input: "user.keytab",
		}, nil)
		if err != nil {
			return err
		}
		invokeFile1, err := std.File(ctx, &std.FileArgs{
			Input: "krb5.conf",
		}, nil)
		if err != nil {
			return err
		}
		_, err = datasync.NewLocationHdfs(ctx, "example", &datasync.LocationHdfsArgs{
			AgentArns: pulumi.StringArray{
				exampleAwsDatasyncAgent.Arn,
			},
			AuthenticationType: pulumi.String("KERBEROS"),
			NameNodes: datasync.LocationHdfsNameNodeArray{
				&datasync.LocationHdfsNameNodeArgs{
					Hostname: pulumi.Any(exampleAwsInstance.PrivateDns),
					Port:     pulumi.Int(80),
				},
			},
			KerberosPrincipal:    pulumi.String("user@example.com"),
			KerberosKeytabBase64: pulumi.String(invokeFilebase64.Result),
			KerberosKrb5Conf:     pulumi.String(invokeFile1.Result),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Aws = Pulumi.Aws;
using Std = Pulumi.Std;
return await Deployment.RunAsync(() => 
{
    var example = new Aws.DataSync.LocationHdfs("example", new()
    {
        AgentArns = new[]
        {
            exampleAwsDatasyncAgent.Arn,
        },
        AuthenticationType = "KERBEROS",
        NameNodes = new[]
        {
            new Aws.DataSync.Inputs.LocationHdfsNameNodeArgs
            {
                Hostname = exampleAwsInstance.PrivateDns,
                Port = 80,
            },
        },
        KerberosPrincipal = "user@example.com",
        KerberosKeytabBase64 = Std.Filebase64.Invoke(new()
        {
            Input = "user.keytab",
        }).Apply(invoke => invoke.Result),
        KerberosKrb5Conf = Std.File.Invoke(new()
        {
            Input = "krb5.conf",
        }).Apply(invoke => invoke.Result),
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aws.datasync.LocationHdfs;
import com.pulumi.aws.datasync.LocationHdfsArgs;
import com.pulumi.aws.datasync.inputs.LocationHdfsNameNodeArgs;
import com.pulumi.std.StdFunctions;
import com.pulumi.std.inputs.Filebase64Args;
import com.pulumi.std.inputs.FileArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var example = new LocationHdfs("example", LocationHdfsArgs.builder()
            .agentArns(exampleAwsDatasyncAgent.arn())
            .authenticationType("KERBEROS")
            .nameNodes(LocationHdfsNameNodeArgs.builder()
                .hostname(exampleAwsInstance.privateDns())
                .port(80)
                .build())
            .kerberosPrincipal("user@example.com")
            .kerberosKeytabBase64(StdFunctions.filebase64(Filebase64Args.builder()
                .input("user.keytab")
                .build()).result())
            .kerberosKrb5Conf(StdFunctions.file(FileArgs.builder()
                .input("krb5.conf")
                .build()).result())
            .build());
    }
}
resources:
  example:
    type: aws:datasync:LocationHdfs
    properties:
      agentArns:
        - ${exampleAwsDatasyncAgent.arn}
      authenticationType: KERBEROS
      nameNodes:
        - hostname: ${exampleAwsInstance.privateDns}
          port: 80
      kerberosPrincipal: user@example.com
      kerberosKeytabBase64:
        fn::invoke:
          function: std:filebase64
          arguments:
            input: user.keytab
          return: result
      kerberosKrb5Conf:
        fn::invoke:
          function: std:file
          arguments:
            input: krb5.conf
          return: result
Create LocationHdfs Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new LocationHdfs(name: string, args: LocationHdfsArgs, opts?: CustomResourceOptions);
@overload
def LocationHdfs(resource_name: str,
                 args: LocationHdfsArgs,
                 opts: Optional[ResourceOptions] = None)
@overload
def LocationHdfs(resource_name: str,
                 opts: Optional[ResourceOptions] = None,
                 agent_arns: Optional[Sequence[str]] = None,
                 name_nodes: Optional[Sequence[LocationHdfsNameNodeArgs]] = None,
                 kms_key_provider_uri: Optional[str] = None,
                 authentication_type: Optional[str] = None,
                 kerberos_keytab_base64: Optional[str] = None,
                 kerberos_krb5_conf: Optional[str] = None,
                 kerberos_krb5_conf_base64: Optional[str] = None,
                 kerberos_principal: Optional[str] = None,
                 block_size: Optional[int] = None,
                 kerberos_keytab: Optional[str] = None,
                 qop_configuration: Optional[LocationHdfsQopConfigurationArgs] = None,
                 region: Optional[str] = None,
                 replication_factor: Optional[int] = None,
                 simple_user: Optional[str] = None,
                 subdirectory: Optional[str] = None,
                 tags: Optional[Mapping[str, str]] = None)
func NewLocationHdfs(ctx *Context, name string, args LocationHdfsArgs, opts ...ResourceOption) (*LocationHdfs, error)
public LocationHdfs(string name, LocationHdfsArgs args, CustomResourceOptions? opts = null)
public LocationHdfs(String name, LocationHdfsArgs args)
public LocationHdfs(String name, LocationHdfsArgs args, CustomResourceOptions options)
type: aws:datasync:LocationHdfs
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args LocationHdfsArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args LocationHdfsArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args LocationHdfsArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args LocationHdfsArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args LocationHdfsArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var locationHdfsResource = new Aws.DataSync.LocationHdfs("locationHdfsResource", new()
{
    AgentArns = new[]
    {
        "string",
    },
    NameNodes = new[]
    {
        new Aws.DataSync.Inputs.LocationHdfsNameNodeArgs
        {
            Hostname = "string",
            Port = 0,
        },
    },
    KmsKeyProviderUri = "string",
    AuthenticationType = "string",
    KerberosKeytabBase64 = "string",
    KerberosKrb5Conf = "string",
    KerberosKrb5ConfBase64 = "string",
    KerberosPrincipal = "string",
    BlockSize = 0,
    KerberosKeytab = "string",
    QopConfiguration = new Aws.DataSync.Inputs.LocationHdfsQopConfigurationArgs
    {
        DataTransferProtection = "string",
        RpcProtection = "string",
    },
    Region = "string",
    ReplicationFactor = 0,
    SimpleUser = "string",
    Subdirectory = "string",
    Tags = 
    {
        { "string", "string" },
    },
});
example, err := datasync.NewLocationHdfs(ctx, "locationHdfsResource", &datasync.LocationHdfsArgs{
	AgentArns: pulumi.StringArray{
		pulumi.String("string"),
	},
	NameNodes: datasync.LocationHdfsNameNodeArray{
		&datasync.LocationHdfsNameNodeArgs{
			Hostname: pulumi.String("string"),
			Port:     pulumi.Int(0),
		},
	},
	KmsKeyProviderUri:      pulumi.String("string"),
	AuthenticationType:     pulumi.String("string"),
	KerberosKeytabBase64:   pulumi.String("string"),
	KerberosKrb5Conf:       pulumi.String("string"),
	KerberosKrb5ConfBase64: pulumi.String("string"),
	KerberosPrincipal:      pulumi.String("string"),
	BlockSize:              pulumi.Int(0),
	KerberosKeytab:         pulumi.String("string"),
	QopConfiguration: &datasync.LocationHdfsQopConfigurationArgs{
		DataTransferProtection: pulumi.String("string"),
		RpcProtection:          pulumi.String("string"),
	},
	Region:            pulumi.String("string"),
	ReplicationFactor: pulumi.Int(0),
	SimpleUser:        pulumi.String("string"),
	Subdirectory:      pulumi.String("string"),
	Tags: pulumi.StringMap{
		"string": pulumi.String("string"),
	},
})
var locationHdfsResource = new LocationHdfs("locationHdfsResource", LocationHdfsArgs.builder()
    .agentArns("string")
    .nameNodes(LocationHdfsNameNodeArgs.builder()
        .hostname("string")
        .port(0)
        .build())
    .kmsKeyProviderUri("string")
    .authenticationType("string")
    .kerberosKeytabBase64("string")
    .kerberosKrb5Conf("string")
    .kerberosKrb5ConfBase64("string")
    .kerberosPrincipal("string")
    .blockSize(0)
    .kerberosKeytab("string")
    .qopConfiguration(LocationHdfsQopConfigurationArgs.builder()
        .dataTransferProtection("string")
        .rpcProtection("string")
        .build())
    .region("string")
    .replicationFactor(0)
    .simpleUser("string")
    .subdirectory("string")
    .tags(Map.of("string", "string"))
    .build());
location_hdfs_resource = aws.datasync.LocationHdfs("locationHdfsResource",
    agent_arns=["string"],
    name_nodes=[{
        "hostname": "string",
        "port": 0,
    }],
    kms_key_provider_uri="string",
    authentication_type="string",
    kerberos_keytab_base64="string",
    kerberos_krb5_conf="string",
    kerberos_krb5_conf_base64="string",
    kerberos_principal="string",
    block_size=0,
    kerberos_keytab="string",
    qop_configuration={
        "data_transfer_protection": "string",
        "rpc_protection": "string",
    },
    region="string",
    replication_factor=0,
    simple_user="string",
    subdirectory="string",
    tags={
        "string": "string",
    })
const locationHdfsResource = new aws.datasync.LocationHdfs("locationHdfsResource", {
    agentArns: ["string"],
    nameNodes: [{
        hostname: "string",
        port: 0,
    }],
    kmsKeyProviderUri: "string",
    authenticationType: "string",
    kerberosKeytabBase64: "string",
    kerberosKrb5Conf: "string",
    kerberosKrb5ConfBase64: "string",
    kerberosPrincipal: "string",
    blockSize: 0,
    kerberosKeytab: "string",
    qopConfiguration: {
        dataTransferProtection: "string",
        rpcProtection: "string",
    },
    region: "string",
    replicationFactor: 0,
    simpleUser: "string",
    subdirectory: "string",
    tags: {
        string: "string",
    },
});
type: aws:datasync:LocationHdfs
properties:
    agentArns:
        - string
    authenticationType: string
    blockSize: 0
    kerberosKeytab: string
    kerberosKeytabBase64: string
    kerberosKrb5Conf: string
    kerberosKrb5ConfBase64: string
    kerberosPrincipal: string
    kmsKeyProviderUri: string
    nameNodes:
        - hostname: string
          port: 0
    qopConfiguration:
        dataTransferProtection: string
        rpcProtection: string
    region: string
    replicationFactor: 0
    simpleUser: string
    subdirectory: string
    tags:
        string: string
LocationHdfs Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
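For instance, here is a minimal sketch of the two equivalent Python forms for name_nodes; the agent ARN and hostname below are hypothetical placeholder values:
import pulumi_aws as aws

# Argument-class form; the ARN and hostname are hypothetical placeholders.
by_args = aws.datasync.LocationHdfs("byArgs",
    agent_arns=["arn:aws:datasync:us-east-1:123456789012:agent/agent-0example"],
    authentication_type="SIMPLE",
    simple_user="hdfs",
    name_nodes=[aws.datasync.LocationHdfsNameNodeArgs(
        hostname="namenode.internal",
        port=8020,
    )])

# Equivalent dictionary-literal form.
by_dict = aws.datasync.LocationHdfs("byDict",
    agent_arns=["arn:aws:datasync:us-east-1:123456789012:agent/agent-0example"],
    authentication_type="SIMPLE",
    simple_user="hdfs",
    name_nodes=[{
        "hostname": "namenode.internal",
        "port": 8020,
    }])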
The LocationHdfs resource accepts the following input properties:
- AgentArns List<string>
- A list of DataSync Agent ARNs with which this location will be associated.
- NameNodes List<LocationHdfsNameNode>
- The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
- AuthenticationType string
- The type of authentication used to determine the identity of the user. Valid values are SIMPLE and KERBEROS.
- BlockSize int
- The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
- KerberosKeytab string
- The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use kerberos_keytab_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab_base64) is required.
- KerberosKeytabBase64 string
- Use instead of kerberos_keytab to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab) is required.
- KerberosKrb5Conf string
- The krb5.conf file that contains the Kerberos configuration information. Use kerberos_krb5_conf_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf_base64) is required.
- KerberosKrb5ConfBase64 string
- Use instead of kerberos_krb5_conf to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf) is required.
- KerberosPrincipal string
- The Kerberos principal with access to the files and folders on the HDFS cluster. If KERBEROS is specified for authentication_type, this parameter is required.
- KmsKeyProviderUri string
- The URI of the HDFS cluster's Key Management Server (KMS).
- QopConfiguration LocationHdfsQopConfiguration
- The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If qop_configuration isn't specified, rpc_protection and data_transfer_protection default to PRIVACY. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below.
- Region string
- Region where this resource will be managed. Defaults to the Region set in the provider configuration.
- ReplicationFactor int
- The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
- SimpleUser string
- The user name used to identify the client on the host operating system. If SIMPLE is specified for authentication_type, this parameter is required.
- Subdirectory string
- A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to /.
- Tags Dictionary<string, string>
- Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
- AgentArns []string
- A list of DataSync Agent ARNs with which this location will be associated.
- NameNodes []LocationHdfsNameNodeArgs
- The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
- AuthenticationType string
- The type of authentication used to determine the identity of the user. Valid values are SIMPLE and KERBEROS.
- BlockSize int
- The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
- KerberosKeytab string
- The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use kerberos_keytab_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab_base64) is required.
- KerberosKeytabBase64 string
- Use instead of kerberos_keytab to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab) is required.
- KerberosKrb5Conf string
- The krb5.conf file that contains the Kerberos configuration information. Use kerberos_krb5_conf_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf_base64) is required.
- KerberosKrb5ConfBase64 string
- Use instead of kerberos_krb5_conf to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf) is required.
- KerberosPrincipal string
- The Kerberos principal with access to the files and folders on the HDFS cluster. If KERBEROS is specified for authentication_type, this parameter is required.
- KmsKeyProviderUri string
- The URI of the HDFS cluster's Key Management Server (KMS).
- QopConfiguration LocationHdfsQopConfigurationArgs
- The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If qop_configuration isn't specified, rpc_protection and data_transfer_protection default to PRIVACY. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below.
- Region string
- Region where this resource will be managed. Defaults to the Region set in the provider configuration.
- ReplicationFactor int
- The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
- SimpleUser string
- The user name used to identify the client on the host operating system. If SIMPLE is specified for authentication_type, this parameter is required.
- Subdirectory string
- A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to /.
- Tags map[string]string
- Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
- agentArns List<String>
- A list of DataSync Agent ARNs with which this location will be associated.
- nameNodes List<LocationHdfsNameNode>
- The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
- authenticationType String
- The type of authentication used to determine the identity of the user. Valid values are SIMPLE and KERBEROS.
- blockSize Integer
- The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
- kerberosKeytab String
- The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use kerberos_keytab_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab_base64) is required.
- kerberosKeytabBase64 String
- Use instead of kerberos_keytab to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab) is required.
- kerberosKrb5Conf String
- The krb5.conf file that contains the Kerberos configuration information. Use kerberos_krb5_conf_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf_base64) is required.
- kerberosKrb5ConfBase64 String
- Use instead of kerberos_krb5_conf to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf) is required.
- kerberosPrincipal String
- The Kerberos principal with access to the files and folders on the HDFS cluster. If KERBEROS is specified for authentication_type, this parameter is required.
- kmsKeyProviderUri String
- The URI of the HDFS cluster's Key Management Server (KMS).
- qopConfiguration LocationHdfsQopConfiguration
- The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If qop_configuration isn't specified, rpc_protection and data_transfer_protection default to PRIVACY. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below.
- region String
- Region where this resource will be managed. Defaults to the Region set in the provider configuration.
- replicationFactor Integer
- The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
- simpleUser String
- The user name used to identify the client on the host operating system. If SIMPLE is specified for authentication_type, this parameter is required.
- subdirectory String
- A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to /.
- tags Map<String,String>
- Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
- agentArns string[]
- A list of DataSync Agent ARNs with which this location will be associated.
- nameNodes LocationHdfsNameNode[]
- The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
- authenticationType string
- The type of authentication used to determine the identity of the user. Valid values are SIMPLE and KERBEROS.
- blockSize number
- The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
- kerberosKeytab string
- The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use kerberos_keytab_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab_base64) is required.
- kerberosKeytabBase64 string
- Use instead of kerberos_keytab to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab) is required.
- kerberosKrb5Conf string
- The krb5.conf file that contains the Kerberos configuration information. Use kerberos_krb5_conf_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf_base64) is required.
- kerberosKrb5ConfBase64 string
- Use instead of kerberos_krb5_conf to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf) is required.
- kerberosPrincipal string
- The Kerberos principal with access to the files and folders on the HDFS cluster. If KERBEROS is specified for authentication_type, this parameter is required.
- kmsKeyProviderUri string
- The URI of the HDFS cluster's Key Management Server (KMS).
- qopConfiguration LocationHdfsQopConfiguration
- The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If qop_configuration isn't specified, rpc_protection and data_transfer_protection default to PRIVACY. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below.
- region string
- Region where this resource will be managed. Defaults to the Region set in the provider configuration.
- replicationFactor number
- The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
- simpleUser string
- The user name used to identify the client on the host operating system. If SIMPLE is specified for authentication_type, this parameter is required.
- subdirectory string
- A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to /.
- tags {[key: string]: string}
- Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
- agent_arns Sequence[str]
- A list of DataSync Agent ARNs with which this location will be associated.
- name_nodes Sequence[LocationHdfsNameNodeArgs]
- The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
- authentication_type str
- The type of authentication used to determine the identity of the user. Valid values are SIMPLE and KERBEROS.
- block_size int
- The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
- kerberos_keytab str
- The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use kerberos_keytab_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab_base64) is required.
- kerberos_keytab_base64 str
- Use instead of kerberos_keytab to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab) is required.
- kerberos_krb5_conf str
- The krb5.conf file that contains the Kerberos configuration information. Use kerberos_krb5_conf_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf_base64) is required.
- kerberos_krb5_conf_base64 str
- Use instead of kerberos_krb5_conf to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf) is required.
- kerberos_principal str
- The Kerberos principal with access to the files and folders on the HDFS cluster. If KERBEROS is specified for authentication_type, this parameter is required.
- kms_key_provider_uri str
- The URI of the HDFS cluster's Key Management Server (KMS).
- qop_configuration LocationHdfsQopConfigurationArgs
- The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If qop_configuration isn't specified, rpc_protection and data_transfer_protection default to PRIVACY. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below. A Python sketch combining the optional tuning inputs appears after this list.
- region str
- Region where this resource will be managed. Defaults to the Region set in the provider configuration.
- replication_factor int
- The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
- simple_user str
- The user name used to identify the client on the host operating system. If SIMPLE is specified for authentication_type, this parameter is required.
- subdirectory str
- A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to /.
- tags Mapping[str, str]
- Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
- agentArns List<String>
- A list of DataSync Agent ARNs with which this location will be associated.
- nameNodes List<Property Map>
- The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
- authenticationType String
- The type of authentication used to determine the identity of the user. Valid values are SIMPLE and KERBEROS.
- blockSize Number
- The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
- kerberosKeytab String
- The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use kerberos_keytab_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab_base64) is required.
- kerberosKeytabBase64 String
- Use instead of kerberos_keytab to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab) is required.
- kerberosKrb5Conf String
- The krb5.conf file that contains the Kerberos configuration information. Use kerberos_krb5_conf_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf_base64) is required.
- kerberosKrb5ConfBase64 String
- Use instead of kerberos_krb5_conf to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf) is required.
- kerberosPrincipal String
- The Kerberos principal with access to the files and folders on the HDFS cluster. If KERBEROS is specified for authentication_type, this parameter is required.
- kmsKeyProviderUri String
- The URI of the HDFS cluster's Key Management Server (KMS).
- qopConfiguration Property Map
- The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If qop_configuration isn't specified, rpc_protection and data_transfer_protection default to PRIVACY. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below.
- region String
- Region where this resource will be managed. Defaults to the Region set in the provider configuration.
- replicationFactor Number
- The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
- simpleUser String
- The user name used to identify the client on the host operating system. If SIMPLE is specified for authentication_type, this parameter is required.
- subdirectory String
- A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to /.
- tags Map<String>
- Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
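The optional tuning inputs above can be combined; the following is a minimal Python sketch, assuming a placeholder agent ARN and NameNode hostname, with block_size shown at its 128 MiB default (a multiple of 512 bytes):
import pulumi_aws as aws

# Minimal sketch of the optional tuning inputs; the agent ARN and
# hostname are hypothetical placeholders.
tuned = aws.datasync.LocationHdfs("tuned",
    agent_arns=["arn:aws:datasync:us-east-1:123456789012:agent/agent-0example"],
    authentication_type="SIMPLE",
    simple_user="hdfs",
    name_nodes=[{
        "hostname": "namenode.internal",
        "port": 8020,
    }],
    block_size=134217728,  # 128 MiB, a multiple of 512 bytes
    replication_factor=3,  # replicate writes to three DataNodes
    subdirectory="/ingest",  # read from and write to this HDFS path
    qop_configuration={
        "data_transfer_protection": "PRIVACY",
        "rpc_protection": "PRIVACY",
    },
    tags={
        "Environment": "example",
    })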
Outputs
All input properties are implicitly available as output properties. Additionally, the LocationHdfs resource produces the following output properties:
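- Arn string
- Amazon Resource Name (ARN) of the DataSync Location.
- Id string
- The provider-assigned unique ID for this managed resource.
- TagsAll Dictionary<string, string>
- A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- Uri string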
Look up Existing LocationHdfs Resource
Get an existing LocationHdfs resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: LocationHdfsState, opts?: CustomResourceOptions): LocationHdfs
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        agent_arns: Optional[Sequence[str]] = None,
        arn: Optional[str] = None,
        authentication_type: Optional[str] = None,
        block_size: Optional[int] = None,
        kerberos_keytab: Optional[str] = None,
        kerberos_keytab_base64: Optional[str] = None,
        kerberos_krb5_conf: Optional[str] = None,
        kerberos_krb5_conf_base64: Optional[str] = None,
        kerberos_principal: Optional[str] = None,
        kms_key_provider_uri: Optional[str] = None,
        name_nodes: Optional[Sequence[LocationHdfsNameNodeArgs]] = None,
        qop_configuration: Optional[LocationHdfsQopConfigurationArgs] = None,
        region: Optional[str] = None,
        replication_factor: Optional[int] = None,
        simple_user: Optional[str] = None,
        subdirectory: Optional[str] = None,
        tags: Optional[Mapping[str, str]] = None,
        tags_all: Optional[Mapping[str, str]] = None,
        uri: Optional[str] = None) -> LocationHdfs
func GetLocationHdfs(ctx *Context, name string, id IDInput, state *LocationHdfsState, opts ...ResourceOption) (*LocationHdfs, error)
public static LocationHdfs Get(string name, Input<string> id, LocationHdfsState? state, CustomResourceOptions? opts = null)
public static LocationHdfs get(String name, Output<String> id, LocationHdfsState state, CustomResourceOptions options)
resources:
  _:
    type: aws:datasync:LocationHdfs
    get:
      id: ${id}
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
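For example, a minimal Python sketch of such a lookup; the location ARN used as the id is a hypothetical placeholder:
import pulumi
import pulumi_aws as aws

# Look up an existing HDFS location; DataSync locations are identified
# by their ARN, and the value below is a hypothetical placeholder.
existing = aws.datasync.LocationHdfs.get("existing",
    id="arn:aws:datasync:us-east-1:123456789012:location/loc-0example1234567890")

pulumi.export("hdfsUri", existing.uri)
The following state arguments are supported: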
- AgentArns List<string>
- A list of DataSync Agent ARNs with which this location will be associated.
- Arn string
- Amazon Resource Name (ARN) of the DataSync Location.
- AuthenticationType string
- The type of authentication used to determine the identity of the user. Valid values are SIMPLE and KERBEROS.
- BlockSize int
- The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
- KerberosKeytab string
- The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use kerberos_keytab_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab_base64) is required.
- KerberosKeytabBase64 string
- Use instead of kerberos_keytab to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab) is required.
- KerberosKrb5Conf string
- The krb5.conf file that contains the Kerberos configuration information. Use kerberos_krb5_conf_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf_base64) is required.
- KerberosKrb5ConfBase64 string
- Use instead of kerberos_krb5_conf to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf) is required.
- KerberosPrincipal string
- The Kerberos principal with access to the files and folders on the HDFS cluster. If KERBEROS is specified for authentication_type, this parameter is required.
- KmsKeyProviderUri string
- The URI of the HDFS cluster's Key Management Server (KMS).
- NameNodes List<LocationHdfsNameNode>
- The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
- QopConfiguration LocationHdfsQopConfiguration
- The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If qop_configuration isn't specified, rpc_protection and data_transfer_protection default to PRIVACY. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below.
- Region string
- Region where this resource will be managed. Defaults to the Region set in the provider configuration.
- ReplicationFactor int
- The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
- SimpleUser string
- The user name used to identify the client on the host operating system. If SIMPLE is specified for authentication_type, this parameter is required.
- Subdirectory string
- A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to /.
- Tags Dictionary<string, string>
- Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
- TagsAll Dictionary<string, string>
- A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- Uri string
- AgentArns []string
- A list of DataSync Agent ARNs with which this location will be associated.
- Arn string
- Amazon Resource Name (ARN) of the DataSync Location.
- AuthenticationType string
- The type of authentication used to determine the identity of the user. Valid values are SIMPLE and KERBEROS.
- BlockSize int
- The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
- KerberosKeytab string
- The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use kerberos_keytab_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab_base64) is required.
- KerberosKeytabBase64 string
- Use instead of kerberos_keytab to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab) is required.
- KerberosKrb5Conf string
- The krb5.conf file that contains the Kerberos configuration information. Use kerberos_krb5_conf_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf_base64) is required.
- KerberosKrb5ConfBase64 string
- Use instead of kerberos_krb5_conf to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf) is required.
- KerberosPrincipal string
- The Kerberos principal with access to the files and folders on the HDFS cluster. If KERBEROS is specified for authentication_type, this parameter is required.
- KmsKeyProviderUri string
- The URI of the HDFS cluster's Key Management Server (KMS).
- NameNodes []LocationHdfsNameNodeArgs
- The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
- QopConfiguration LocationHdfsQopConfigurationArgs
- The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If qop_configuration isn't specified, rpc_protection and data_transfer_protection default to PRIVACY. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below.
- Region string
- Region where this resource will be managed. Defaults to the Region set in the provider configuration.
- ReplicationFactor int
- The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
- SimpleUser string
- The user name used to identify the client on the host operating system. If SIMPLE is specified for authentication_type, this parameter is required.
- Subdirectory string
- A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to /.
- Tags map[string]string
- Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
- TagsAll map[string]string
- A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- Uri string
- agentArns List<String>
- A list of DataSync Agent ARNs with which this location will be associated.
- arn String
- Amazon Resource Name (ARN) of the DataSync Location.
- authenticationType String
- The type of authentication used to determine the identity of the user. Valid values are SIMPLE and KERBEROS.
- blockSize Integer
- The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
- kerberosKeytab String
- The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use kerberos_keytab_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab_base64) is required.
- kerberosKeytabBase64 String
- Use instead of kerberos_keytab to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab) is required.
- kerberosKrb5Conf String
- The krb5.conf file that contains the Kerberos configuration information. Use kerberos_krb5_conf_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf_base64) is required.
- kerberosKrb5ConfBase64 String
- Use instead of kerberos_krb5_conf to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf) is required.
- kerberosPrincipal String
- The Kerberos principal with access to the files and folders on the HDFS cluster. If KERBEROS is specified for authentication_type, this parameter is required.
- kmsKeyProviderUri String
- The URI of the HDFS cluster's Key Management Server (KMS).
- nameNodes List<LocationHdfsNameNode>
- The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
- qopConfiguration LocationHdfsQopConfiguration
- The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If qop_configuration isn't specified, rpc_protection and data_transfer_protection default to PRIVACY. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below.
- region String
- Region where this resource will be managed. Defaults to the Region set in the provider configuration.
- replicationFactor Integer
- The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
- simpleUser String
- The user name used to identify the client on the host operating system. If SIMPLE is specified for authentication_type, this parameter is required.
- subdirectory String
- A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to /.
- tags Map<String,String>
- Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
- tagsAll Map<String,String>
- A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- uri String
- agentArns string[]
- A list of DataSync Agent ARNs with which this location will be associated.
- arn string
- Amazon Resource Name (ARN) of the DataSync Location.
- authenticationType string
- The type of authentication used to determine the identity of the user. Valid values are SIMPLE and KERBEROS.
- blockSize number
- The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
- kerberosKeytab string
- The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use kerberos_keytab_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab_base64) is required.
- kerberosKeytabBase64 string
- Use instead of kerberos_keytab to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab) is required.
- kerberosKrb5Conf string
- The krb5.conf file that contains the Kerberos configuration information. Use kerberos_krb5_conf_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf_base64) is required.
- kerberosKrb5ConfBase64 string
- Use instead of kerberos_krb5_conf to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf) is required.
- kerberosPrincipal string
- The Kerberos principal with access to the files and folders on the HDFS cluster. If KERBEROS is specified for authentication_type, this parameter is required.
- kmsKeyProviderUri string
- The URI of the HDFS cluster's Key Management Server (KMS).
- nameNodes LocationHdfsNameNode[]
- The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
- qopConfiguration LocationHdfsQopConfiguration
- The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If qop_configuration isn't specified, rpc_protection and data_transfer_protection default to PRIVACY. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below.
- region string
- Region where this resource will be managed. Defaults to the Region set in the provider configuration.
- replicationFactor number
- The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
- simpleUser string
- The user name used to identify the client on the host operating system. If SIMPLE is specified for authentication_type, this parameter is required.
- subdirectory string
- A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to /.
- tags {[key: string]: string}
- Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider level.
- tagsAll {[key: string]: string}
- A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- uri string
- agent_arns Sequence[str]
- A list of DataSync Agent ARNs with which this location will be associated.
- arn str
- Amazon Resource Name (ARN) of the DataSync Location.
- authentication_type str
- The type of authentication used to determine the identity of the user. Valid values are SIMPLEandKERBEROS.
- block_size int
- The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
- kerberos_keytab str
- The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use kerberos_keytab_base64instead whenever the value is not a valid UTF-8 string. IfKERBEROSis specified forauthentication_type, this parameter (orkerberos_keytab_base64) is required.
- kerberos_keytab_ strbase64 
- Use instead of kerberos_keytabto pass base64-encoded binary data directly. IfKERBEROSis specified forauthentication_type, this parameter (orkerberos_keytab) is required.
- kerberos_krb5_ strconf 
- The krb5.conf file that contains the Kerberos configuration information. Use kerberos_krb5_conf_base64instead whenever the value is not a valid UTF-8 string. IfKERBEROSis specified forauthentication_type, this parameter (orkerberos_krb5_conf_base64) is required.
- kerberos_krb5_ strconf_ base64 
- Use instead of kerberos_krb5_confto pass base64-encoded binary data directly. IfKERBEROSis specified forauthentication_type, this parameter (orkerberos_krb5_conf) is required.
- kerberos_principal str
- The Kerberos principal with access to the files and folders on the HDFS cluster. If KERBEROSis specified forauthentication_type, this parameter is required.
- kms_key_ strprovider_ uri 
- The URI of the HDFS cluster's Key Management Server (KMS).
- name_nodes Sequence[LocationHdfs Name Node Args] 
- The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
- qop_configuration LocationHdfs Qop Configuration Args 
- The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If qop_configurationisn't specified,rpc_protectionanddata_transfer_protectiondefault toPRIVACY. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below.
- region str
- Region where this resource will be managed. Defaults to the Region set in the provider configuration.
- replication_factor int
- The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
- simple_user str
- The user name used to identify the client on the host operating system. If SIMPLE is specified for authentication_type, this parameter is required.
- subdirectory str
- A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to /.
- tags Mapping[str, str]
- Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- tags_all Mapping[str, str]
- A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- uri str
- The URI of the HDFS cluster location.
- agentArns List<String>
- A list of DataSync Agent ARNs with which this location will be associated.
- arn String
- Amazon Resource Name (ARN) of the DataSync Location.
- authenticationType String
- The type of authentication used to determine the identity of the user. Valid values are SIMPLE and KERBEROS.
- blockSize Number
- The size of data blocks to write into the HDFS cluster. The block size must be a multiple of 512 bytes. The default block size is 128 mebibytes (MiB).
- kerberosKeytab String
- The Kerberos key table (keytab) that contains mappings between the defined Kerberos principal and the encrypted keys. Use kerberos_keytab_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab_base64) is required.
- kerberosKeytabBase64 String
- Use instead of kerberos_keytab to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_keytab) is required.
- kerberosKrb5Conf String
- The krb5.conf file that contains the Kerberos configuration information. Use kerberos_krb5_conf_base64 instead whenever the value is not a valid UTF-8 string. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf_base64) is required.
- kerberosKrb5ConfBase64 String
- Use instead of kerberos_krb5_conf to pass base64-encoded binary data directly. If KERBEROS is specified for authentication_type, this parameter (or kerberos_krb5_conf) is required.
- kerberosPrincipal String
- The Kerberos principal with access to the files and folders on the HDFS cluster. If KERBEROS is specified for authentication_type, this parameter is required.
- kmsKeyProviderUri String
- The URI of the HDFS cluster's Key Management Server (KMS).
- nameNodes List<Property Map>
- The NameNode that manages the HDFS namespace. The NameNode performs operations such as opening, closing, and renaming files and directories. The NameNode contains the information to map blocks of data to the DataNodes. You can use only one NameNode. See configuration below.
- qopConfiguration Property Map
- The Quality of Protection (QOP) configuration specifies the Remote Procedure Call (RPC) and data transfer protection settings configured on the Hadoop Distributed File System (HDFS) cluster. If qop_configuration isn't specified, rpc_protection and data_transfer_protection default to PRIVACY. If you set RpcProtection or DataTransferProtection, the other parameter assumes the same value. See configuration below.
- region String
- Region where this resource will be managed. Defaults to the Region set in the provider configuration.
- replicationFactor Number
- The number of DataNodes to replicate the data to when writing to the HDFS cluster. By default, data is replicated to three DataNodes.
- simpleUser String
- The user name used to identify the client on the host operating system. If SIMPLE is specified for authentication_type, this parameter is required.
- subdirectory String
- A subdirectory in the HDFS cluster. This subdirectory is used to read data from or write data to the HDFS cluster. If the subdirectory isn't specified, it will default to /.
- tags Map<String>
- Key-value pairs of resource tags to assign to the DataSync Location. If configured with a provider default_tags configuration block present, tags with matching keys will overwrite those defined at the provider-level.
- tagsAll Map<String>
- A map of tags assigned to the resource, including those inherited from the provider default_tags configuration block.
- uri String
- The URI of the HDFS cluster location.
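For orientation, here is a minimal TypeScript sketch that exercises the optional tuning properties listed above (subdirectory, blockSize, replicationFactor, and tags); the agent ARN and NameNode hostname are placeholder values rather than resources defined on this page:

import * as aws from "@pulumi/aws";

// Placeholder values for illustration only.
const agentArn = "arn:aws:datasync:us-east-1:123456789012:agent/agent-0123456789abcdef0";

const tuned = new aws.datasync.LocationHdfs("tuned", {
    agentArns: [agentArn],
    authenticationType: "SIMPLE",
    simpleUser: "hdfsuser",
    nameNodes: [{ hostname: "namenode.internal.example.com", port: 8020 }],
    subdirectory: "/data/incoming", // read/write under this path instead of /
    blockSize: 268435456,           // 256 MiB; must be a multiple of 512 bytes
    replicationFactor: 2,           // write each block to 2 DataNodes instead of the default 3
    tags: { Environment: "staging" },
});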
Supporting Types
LocationHdfsNameNode, LocationHdfsNameNodeArgs        
- Hostname string
- The hostname of the NameNode in the HDFS cluster. This value is the IP address or Domain Name Service (DNS) name of the NameNode. An agent that's installed on-premises uses this hostname to communicate with the NameNode in the network.
- Port int
- The port that the NameNode uses to listen to client requests.
- Hostname string
- The hostname of the NameNode in the HDFS cluster. This value is the IP address or Domain Name Service (DNS) name of the NameNode. An agent that's installed on-premises uses this hostname to communicate with the NameNode in the network.
- Port int
- The port that the NameNode uses to listen to client requests.
- hostname String
- The hostname of the NameNode in the HDFS cluster. This value is the IP address or Domain Name Service (DNS) name of the NameNode. An agent that's installed on-premises uses this hostname to communicate with the NameNode in the network.
- port Integer
- The port that the NameNode uses to listen to client requests.
- hostname string
- The hostname of the NameNode in the HDFS cluster. This value is the IP address or Domain Name Service (DNS) name of the NameNode. An agent that's installed on-premises uses this hostname to communicate with the NameNode in the network.
- port number
- The port that the NameNode uses to listen to client requests.
- hostname str
- The hostname of the NameNode in the HDFS cluster. This value is the IP address or Domain Name Service (DNS) name of the NameNode. An agent that's installed on-premises uses this hostname to communicate with the NameNode in the network.
- port int
- The port that the NameNode uses to listen to client requests.
- hostname String
- The hostname of the NameNode in the HDFS cluster. This value is the IP address or Domain Name Service (DNS) name of the NameNode. An agent that's installed on-premises uses this hostname to communicate with the NameNode in the network.
- port Number
- The port that the NameNode uses to listen to client requests.
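Taken on its own, the nameNodes value is a single-element list of hostname/port pairs. A short TypeScript sketch, where the hostname is a placeholder and 8020 is only the conventional HDFS NameNode RPC port (an assumption here, not a DataSync default):

import * as aws from "@pulumi/aws";

// Exactly one NameNode entry is accepted.
const nameNodes: aws.types.input.datasync.LocationHdfsNameNode[] = [{
    hostname: "namenode.internal.example.com", // DNS name or IP the agent can reach
    port: 8020,
}];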
LocationHdfsQopConfiguration, LocationHdfsQopConfigurationArgs        
- DataTransferProtection string
- The data transfer protection setting configured on the HDFS cluster. This setting corresponds to your dfs.data.transfer.protection setting in the hdfs-site.xml file on your Hadoop cluster. Valid values are DISABLED, AUTHENTICATION, INTEGRITY and PRIVACY.
- RpcProtection string
- The RPC protection setting configured on the HDFS cluster. This setting corresponds to your hadoop.rpc.protection setting in your core-site.xml file on your Hadoop cluster. Valid values are DISABLED, AUTHENTICATION, INTEGRITY and PRIVACY.
- DataTransferProtection string
- The data transfer protection setting configured on the HDFS cluster. This setting corresponds to your dfs.data.transfer.protection setting in the hdfs-site.xml file on your Hadoop cluster. Valid values are DISABLED, AUTHENTICATION, INTEGRITY and PRIVACY.
- RpcProtection string
- The RPC protection setting configured on the HDFS cluster. This setting corresponds to your hadoop.rpc.protection setting in your core-site.xml file on your Hadoop cluster. Valid values are DISABLED, AUTHENTICATION, INTEGRITY and PRIVACY.
- dataTransferProtection String
- The data transfer protection setting configured on the HDFS cluster. This setting corresponds to your dfs.data.transfer.protection setting in the hdfs-site.xml file on your Hadoop cluster. Valid values are DISABLED, AUTHENTICATION, INTEGRITY and PRIVACY.
- rpcProtection String
- The RPC protection setting configured on the HDFS cluster. This setting corresponds to your hadoop.rpc.protection setting in your core-site.xml file on your Hadoop cluster. Valid values are DISABLED, AUTHENTICATION, INTEGRITY and PRIVACY.
- dataTransferProtection string
- The data transfer protection setting configured on the HDFS cluster. This setting corresponds to your dfs.data.transfer.protection setting in the hdfs-site.xml file on your Hadoop cluster. Valid values are DISABLED, AUTHENTICATION, INTEGRITY and PRIVACY.
- rpcProtection string
- The RPC protection setting configured on the HDFS cluster. This setting corresponds to your hadoop.rpc.protection setting in your core-site.xml file on your Hadoop cluster. Valid values are DISABLED, AUTHENTICATION, INTEGRITY and PRIVACY.
- data_transfer_protection str
- The data transfer protection setting configured on the HDFS cluster. This setting corresponds to your dfs.data.transfer.protection setting in the hdfs-site.xml file on your Hadoop cluster. Valid values are DISABLED, AUTHENTICATION, INTEGRITY and PRIVACY.
- rpc_protection str
- The RPC protection setting configured on the HDFS cluster. This setting corresponds to your hadoop.rpc.protection setting in your core-site.xml file on your Hadoop cluster. Valid values are DISABLED, AUTHENTICATION, INTEGRITY and PRIVACY.
- dataTransferProtection String
- The data transfer protection setting configured on the HDFS cluster. This setting corresponds to your dfs.data.transfer.protection setting in the hdfs-site.xml file on your Hadoop cluster. Valid values are DISABLED, AUTHENTICATION, INTEGRITY and PRIVACY.
- rpcProtection String
- The RPC protection setting configured on the HDFS cluster. This setting corresponds to your hadoop.rpc.protection setting in your core-site.xml file on your Hadoop cluster. Valid values are DISABLED, AUTHENTICATION, INTEGRITY and PRIVACY.
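As an illustrative TypeScript sketch (the agent ARN, principal, hostname, and local file paths are placeholders), a Kerberos-authenticated location can pin both QOP settings explicitly instead of relying on the PRIVACY default or the mirror-the-other-setting behavior described above:

import * as aws from "@pulumi/aws";
import * as std from "@pulumi/std";

const kerberized = new aws.datasync.LocationHdfs("kerberized", {
    agentArns: ["arn:aws:datasync:us-east-1:123456789012:agent/agent-0123456789abcdef0"],
    authenticationType: "KERBEROS",
    kerberosPrincipal: "user@EXAMPLE.COM",
    // Keytabs are binary, so pass them base64-encoded.
    kerberosKeytabBase64: std.filebase64({ input: "user.keytab" }).then(f => f.result),
    kerberosKrb5Conf: std.file({ input: "krb5.conf" }).then(f => f.result),
    nameNodes: [{ hostname: "namenode.internal.example.com", port: 8020 }],
    qopConfiguration: {
        dataTransferProtection: "PRIVACY",
        rpcProtection: "PRIVACY",
    },
});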
Import
Identity Schema
Required
- arn (String) Amazon Resource Name (ARN) of the DataSync HDFS location.
Using pulumi import, import aws_datasync_location_hdfs using the Amazon Resource Name (ARN). For example:
% pulumi import aws_datasync_location_hdfs.example arn:aws:datasync:us-east-1:123456789012:location/loc-12345678901234567
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- AWS Classic pulumi/pulumi-aws
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the aws Terraform Provider.
