gcp.datafusion.Instance
Represents a Data Fusion instance.
To get more information about Instance, see:
- API documentation
- How-to Guides
Example Usage
Data Fusion Instance Basic
using System.Collections.Generic;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var basicInstance = new Gcp.DataFusion.Instance("basicInstance", new()
{
Region = "us-central1",
Type = "BASIC",
});
});
package main
import (
"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/datafusion"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := datafusion.NewInstance(ctx, "basicInstance", &datafusion.InstanceArgs{
Region: pulumi.String("us-central1"),
Type: pulumi.String("BASIC"),
})
if err != nil {
return err
}
return nil
})
}
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.datafusion.Instance;
import com.pulumi.gcp.datafusion.InstanceArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var basicInstance = new Instance("basicInstance", InstanceArgs.builder()
.region("us-central1")
.type("BASIC")
.build());
}
}
import pulumi
import pulumi_gcp as gcp
basic_instance = gcp.datafusion.Instance("basicInstance",
region="us-central1",
type="BASIC")
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const basicInstance = new gcp.datafusion.Instance("basicInstance", {
region: "us-central1",
type: "BASIC",
});
resources:
basicInstance:
type: gcp:datafusion:Instance
properties:
region: us-central1
type: BASIC
Data Fusion Instance Full
using System.Collections.Generic;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var @default = Gcp.AppEngine.GetDefaultServiceAccount.Invoke();
var network = new Gcp.Compute.Network("network");
var privateIpAlloc = new Gcp.Compute.GlobalAddress("privateIpAlloc", new()
{
AddressType = "INTERNAL",
Purpose = "VPC_PEERING",
PrefixLength = 22,
Network = network.Id,
});
var extendedInstance = new Gcp.DataFusion.Instance("extendedInstance", new()
{
Description = "My Data Fusion instance",
DisplayName = "My Data Fusion instance",
Region = "us-central1",
Type = "BASIC",
EnableStackdriverLogging = true,
EnableStackdriverMonitoring = true,
PrivateInstance = true,
DataprocServiceAccount = @default.Apply(getDefaultServiceAccountResult => getDefaultServiceAccountResult.Email),
Labels =
{
{ "example_key", "example_value" },
},
NetworkConfig = new Gcp.DataFusion.Inputs.InstanceNetworkConfigArgs
{
Network = "default",
IpAllocation = Output.Tuple(privateIpAlloc.Address, privateIpAlloc.PrefixLength).Apply(values =>
{
var address = values.Item1;
var prefixLength = values.Item2;
return $"{address}/{prefixLength}";
}),
},
});
});
package main
import (
"fmt"
"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/appengine"
"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/compute"
"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/datafusion"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_default, err := appengine.GetDefaultServiceAccount(ctx, nil, nil)
if err != nil {
return err
}
network, err := compute.NewNetwork(ctx, "network", nil)
if err != nil {
return err
}
privateIpAlloc, err := compute.NewGlobalAddress(ctx, "privateIpAlloc", &compute.GlobalAddressArgs{
AddressType: pulumi.String("INTERNAL"),
Purpose: pulumi.String("VPC_PEERING"),
PrefixLength: pulumi.Int(22),
Network: network.ID(),
})
if err != nil {
return err
}
_, err = datafusion.NewInstance(ctx, "extendedInstance", &datafusion.InstanceArgs{
Description: pulumi.String("My Data Fusion instance"),
DisplayName: pulumi.String("My Data Fusion instance"),
Region: pulumi.String("us-central1"),
Type: pulumi.String("BASIC"),
EnableStackdriverLogging: pulumi.Bool(true),
EnableStackdriverMonitoring: pulumi.Bool(true),
PrivateInstance: pulumi.Bool(true),
DataprocServiceAccount: pulumi.String(_default.Email),
Labels: pulumi.StringMap{
"example_key": pulumi.String("example_value"),
},
NetworkConfig: &datafusion.InstanceNetworkConfigArgs{
Network: pulumi.String("default"),
IpAllocation: pulumi.All(privateIpAlloc.Address, privateIpAlloc.PrefixLength).ApplyT(func(_args []interface{}) (string, error) {
address := _args[0].(string)
// PrefixLength is optional, so it resolves to *int; dereference it before formatting.
prefixLength := _args[1].(*int)
return fmt.Sprintf("%v/%v", address, *prefixLength), nil
}).(pulumi.StringOutput),
},
})
if err != nil {
return err
}
return nil
})
}
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.appengine.AppengineFunctions;
import com.pulumi.gcp.appengine.inputs.GetDefaultServiceAccountArgs;
import com.pulumi.gcp.compute.Network;
import com.pulumi.gcp.compute.GlobalAddress;
import com.pulumi.gcp.compute.GlobalAddressArgs;
import com.pulumi.gcp.datafusion.Instance;
import com.pulumi.gcp.datafusion.InstanceArgs;
import com.pulumi.gcp.datafusion.inputs.InstanceNetworkConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var default_ = AppengineFunctions.getDefaultServiceAccount();
var network = new Network("network");
var privateIpAlloc = new GlobalAddress("privateIpAlloc", GlobalAddressArgs.builder()
.addressType("INTERNAL")
.purpose("VPC_PEERING")
.prefixLength(22)
.network(network.id())
.build());
var extendedInstance = new Instance("extendedInstance", InstanceArgs.builder()
.description("My Data Fusion instance")
.displayName("My Data Fusion instance")
.region("us-central1")
.type("BASIC")
.enableStackdriverLogging(true)
.enableStackdriverMonitoring(true)
.privateInstance(true)
.dataprocServiceAccount(default_.applyValue(getDefaultServiceAccountResult -> getDefaultServiceAccountResult.email()))
.labels(Map.of("example_key", "example_value"))
.networkConfig(InstanceNetworkConfigArgs.builder()
.network("default")
.ipAllocation(Output.tuple(privateIpAlloc.address(), privateIpAlloc.prefixLength()).applyValue(values -> {
var address = values.t1;
var prefixLength = values.t2;
return String.format("%s/%s", address,prefixLength);
}))
.build())
.build());
}
}
import pulumi
import pulumi_gcp as gcp
default = gcp.appengine.get_default_service_account()
network = gcp.compute.Network("network")
private_ip_alloc = gcp.compute.GlobalAddress("privateIpAlloc",
address_type="INTERNAL",
purpose="VPC_PEERING",
prefix_length=22,
network=network.id)
extended_instance = gcp.datafusion.Instance("extendedInstance",
description="My Data Fusion instance",
display_name="My Data Fusion instance",
region="us-central1",
type="BASIC",
enable_stackdriver_logging=True,
enable_stackdriver_monitoring=True,
private_instance=True,
dataproc_service_account=default.email,
labels={
"example_key": "example_value",
},
network_config=gcp.datafusion.InstanceNetworkConfigArgs(
network="default",
ip_allocation=pulumi.Output.all(private_ip_alloc.address, private_ip_alloc.prefix_length).apply(lambda args: f"{args[0]}/{args[1]}"),
))
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const _default = gcp.appengine.getDefaultServiceAccount({});
const network = new gcp.compute.Network("network", {});
const privateIpAlloc = new gcp.compute.GlobalAddress("privateIpAlloc", {
addressType: "INTERNAL",
purpose: "VPC_PEERING",
prefixLength: 22,
network: network.id,
});
const extendedInstance = new gcp.datafusion.Instance("extendedInstance", {
description: "My Data Fusion instance",
displayName: "My Data Fusion instance",
region: "us-central1",
type: "BASIC",
enableStackdriverLogging: true,
enableStackdriverMonitoring: true,
privateInstance: true,
dataprocServiceAccount: _default.then(_default => _default.email),
labels: {
example_key: "example_value",
},
networkConfig: {
network: "default",
ipAllocation: pulumi.all([privateIpAlloc.address, privateIpAlloc.prefixLength]).apply(([address, prefixLength]) => `${address}/${prefixLength}`),
},
});
resources:
extendedInstance:
type: gcp:datafusion:Instance
properties:
description: My Data Fusion instance
displayName: My Data Fusion instance
region: us-central1
type: BASIC
enableStackdriverLogging: true
enableStackdriverMonitoring: true
privateInstance: true
dataprocServiceAccount: ${default.email}
labels:
example_key: example_value
networkConfig:
network: default
ipAllocation: ${privateIpAlloc.address}/${privateIpAlloc.prefixLength}
network:
type: gcp:compute:Network
privateIpAlloc:
type: gcp:compute:GlobalAddress
properties:
addressType: INTERNAL
purpose: VPC_PEERING
prefixLength: 22
network: ${network.id}
variables:
default:
fn::invoke:
Function: gcp:appengine:getDefaultServiceAccount
Arguments: {}
Data Fusion Instance Cmek
using System.Collections.Generic;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var keyRing = new Gcp.Kms.KeyRing("keyRing", new()
{
Location = "us-central1",
});
var cryptoKey = new Gcp.Kms.CryptoKey("cryptoKey", new()
{
KeyRing = keyRing.Id,
});
var project = Gcp.Organizations.GetProject.Invoke();
var cryptoKeyBinding = new Gcp.Kms.CryptoKeyIAMBinding("cryptoKeyBinding", new()
{
CryptoKeyId = cryptoKey.Id,
Role = "roles/cloudkms.cryptoKeyEncrypterDecrypter",
Members = new[]
{
project.Apply(getProjectResult => $"serviceAccount:service-{getProjectResult.Number}@gcp-sa-datafusion.iam.gserviceaccount.com"),
},
});
var cmek = new Gcp.DataFusion.Instance("cmek", new()
{
Region = "us-central1",
Type = "BASIC",
CryptoKeyConfig = new Gcp.DataFusion.Inputs.InstanceCryptoKeyConfigArgs
{
KeyReference = cryptoKey.Id,
},
}, new CustomResourceOptions
{
DependsOn = new[]
{
cryptoKeyBinding,
},
});
});
package main
import (
"fmt"
"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/datafusion"
"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/kms"
"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/organizations"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
keyRing, err := kms.NewKeyRing(ctx, "keyRing", &kms.KeyRingArgs{
Location: pulumi.String("us-central1"),
})
if err != nil {
return err
}
cryptoKey, err := kms.NewCryptoKey(ctx, "cryptoKey", &kms.CryptoKeyArgs{
KeyRing: keyRing.ID(),
})
if err != nil {
return err
}
project, err := organizations.LookupProject(ctx, nil, nil)
if err != nil {
return err
}
cryptoKeyBinding, err := kms.NewCryptoKeyIAMBinding(ctx, "cryptoKeyBinding", &kms.CryptoKeyIAMBindingArgs{
CryptoKeyId: cryptoKey.ID(),
Role: pulumi.String("roles/cloudkms.cryptoKeyEncrypterDecrypter"),
Members: pulumi.StringArray{
pulumi.String(fmt.Sprintf("serviceAccount:service-%v@gcp-sa-datafusion.iam.gserviceaccount.com", project.Number)),
},
})
if err != nil {
return err
}
_, err = datafusion.NewInstance(ctx, "cmek", &datafusion.InstanceArgs{
Region: pulumi.String("us-central1"),
Type: pulumi.String("BASIC"),
CryptoKeyConfig: &datafusion.InstanceCryptoKeyConfigArgs{
KeyReference: cryptoKey.ID(),
},
}, pulumi.DependsOn([]pulumi.Resource{
cryptoKeyBinding,
}))
if err != nil {
return err
}
return nil
})
}
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.kms.KeyRing;
import com.pulumi.gcp.kms.KeyRingArgs;
import com.pulumi.gcp.kms.CryptoKey;
import com.pulumi.gcp.kms.CryptoKeyArgs;
import com.pulumi.gcp.organizations.OrganizationsFunctions;
import com.pulumi.gcp.organizations.inputs.GetProjectArgs;
import com.pulumi.gcp.kms.CryptoKeyIAMBinding;
import com.pulumi.gcp.kms.CryptoKeyIAMBindingArgs;
import com.pulumi.gcp.datafusion.Instance;
import com.pulumi.gcp.datafusion.InstanceArgs;
import com.pulumi.gcp.datafusion.inputs.InstanceCryptoKeyConfigArgs;
import com.pulumi.resources.CustomResourceOptions;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var keyRing = new KeyRing("keyRing", KeyRingArgs.builder()
.location("us-central1")
.build());
var cryptoKey = new CryptoKey("cryptoKey", CryptoKeyArgs.builder()
.keyRing(keyRing.id())
.build());
final var project = OrganizationsFunctions.getProject();
var cryptoKeyBinding = new CryptoKeyIAMBinding("cryptoKeyBinding", CryptoKeyIAMBindingArgs.builder()
.cryptoKeyId(cryptoKey.id())
.role("roles/cloudkms.cryptoKeyEncrypterDecrypter")
.members(project.applyValue(getProjectResult -> List.of(String.format("serviceAccount:service-%s@gcp-sa-datafusion.iam.gserviceaccount.com", getProjectResult.number()))))
.build());
var cmek = new Instance("cmek", InstanceArgs.builder()
.region("us-central1")
.type("BASIC")
.cryptoKeyConfig(InstanceCryptoKeyConfigArgs.builder()
.keyReference(cryptoKey.id())
.build())
.build(), CustomResourceOptions.builder()
.dependsOn(cryptoKeyBinding)
.build());
}
}
import pulumi
import pulumi_gcp as gcp
key_ring = gcp.kms.KeyRing("keyRing", location="us-central1")
crypto_key = gcp.kms.CryptoKey("cryptoKey", key_ring=key_ring.id)
project = gcp.organizations.get_project()
crypto_key_binding = gcp.kms.CryptoKeyIAMBinding("cryptoKeyBinding",
crypto_key_id=crypto_key.id,
role="roles/cloudkms.cryptoKeyEncrypterDecrypter",
members=[f"serviceAccount:service-{project.number}@gcp-sa-datafusion.iam.gserviceaccount.com"])
cmek = gcp.datafusion.Instance("cmek",
region="us-central1",
type="BASIC",
crypto_key_config=gcp.datafusion.InstanceCryptoKeyConfigArgs(
key_reference=crypto_key.id,
),
opts=pulumi.ResourceOptions(depends_on=[crypto_key_binding]))
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const keyRing = new gcp.kms.KeyRing("keyRing", {location: "us-central1"});
const cryptoKey = new gcp.kms.CryptoKey("cryptoKey", {keyRing: keyRing.id});
const project = gcp.organizations.getProject({});
const cryptoKeyBinding = new gcp.kms.CryptoKeyIAMBinding("cryptoKeyBinding", {
cryptoKeyId: cryptoKey.id,
role: "roles/cloudkms.cryptoKeyEncrypterDecrypter",
members: [project.then(project => `serviceAccount:service-${project.number}@gcp-sa-datafusion.iam.gserviceaccount.com`)],
});
const cmek = new gcp.datafusion.Instance("cmek", {
region: "us-central1",
type: "BASIC",
cryptoKeyConfig: {
keyReference: cryptoKey.id,
},
}, {
dependsOn: [cryptoKeyBinding],
});
resources:
cmek:
type: gcp:datafusion:Instance
properties:
region: us-central1
type: BASIC
cryptoKeyConfig:
keyReference: ${cryptoKey.id}
options:
dependsOn:
- ${cryptoKeyBinding}
cryptoKey:
type: gcp:kms:CryptoKey
properties:
keyRing: ${keyRing.id}
keyRing:
type: gcp:kms:KeyRing
properties:
location: us-central1
cryptoKeyBinding:
type: gcp:kms:CryptoKeyIAMBinding
properties:
cryptoKeyId: ${cryptoKey.id}
role: roles/cloudkms.cryptoKeyEncrypterDecrypter
members:
- serviceAccount:service-${project.number}@gcp-sa-datafusion.iam.gserviceaccount.com
variables:
project:
fn::invoke:
Function: gcp:organizations:getProject
Arguments: {}
Data Fusion Instance Enterprise
using System.Collections.Generic;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var enterpriseInstance = new Gcp.DataFusion.Instance("enterpriseInstance", new()
{
EnableRbac = true,
Region = "us-central1",
Type = "ENTERPRISE",
});
});
package main
import (
"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/datafusion"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := datafusion.NewInstance(ctx, "enterpriseInstance", &datafusion.InstanceArgs{
EnableRbac: pulumi.Bool(true),
Region: pulumi.String("us-central1"),
Type: pulumi.String("ENTERPRISE"),
})
if err != nil {
return err
}
return nil
})
}
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.datafusion.Instance;
import com.pulumi.gcp.datafusion.InstanceArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var enterpriseInstance = new Instance("enterpriseInstance", InstanceArgs.builder()
.enableRbac(true)
.region("us-central1")
.type("ENTERPRISE")
.build());
}
}
import pulumi
import pulumi_gcp as gcp
enterprise_instance = gcp.datafusion.Instance("enterpriseInstance",
enable_rbac=True,
region="us-central1",
type="ENTERPRISE")
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const enterpriseInstance = new gcp.datafusion.Instance("enterpriseInstance", {
enableRbac: true,
region: "us-central1",
type: "ENTERPRISE",
});
resources:
enterpriseInstance:
type: gcp:datafusion:Instance
properties:
enableRbac: true
region: us-central1
type: ENTERPRISE
Data Fusion Instance Event
using System.Collections.Generic;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var eventTopic = new Gcp.PubSub.Topic("eventTopic");
var eventInstance = new Gcp.DataFusion.Instance("eventInstance", new()
{
Region = "us-central1",
Type = "BASIC",
EventPublishConfig = new Gcp.DataFusion.Inputs.InstanceEventPublishConfigArgs
{
Enabled = true,
Topic = eventTopic.Id,
},
});
});
package main
import (
"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/datafusion"
"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/pubsub"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
eventTopic, err := pubsub.NewTopic(ctx, "eventTopic", nil)
if err != nil {
return err
}
_, err = datafusion.NewInstance(ctx, "eventInstance", &datafusion.InstanceArgs{
Region: pulumi.String("us-central1"),
Type: pulumi.String("BASIC"),
EventPublishConfig: &datafusion.InstanceEventPublishConfigArgs{
Enabled: pulumi.Bool(true),
Topic: eventTopic.ID(),
},
})
if err != nil {
return err
}
return nil
})
}
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.pubsub.Topic;
import com.pulumi.gcp.datafusion.Instance;
import com.pulumi.gcp.datafusion.InstanceArgs;
import com.pulumi.gcp.datafusion.inputs.InstanceEventPublishConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var eventTopic = new Topic("eventTopic");
var eventInstance = new Instance("eventInstance", InstanceArgs.builder()
.region("us-central1")
.type("BASIC")
.eventPublishConfig(InstanceEventPublishConfigArgs.builder()
.enabled(true)
.topic(eventTopic.id())
.build())
.build());
}
}
import pulumi
import pulumi_gcp as gcp
event_topic = gcp.pubsub.Topic("eventTopic")
event_instance = gcp.datafusion.Instance("eventInstance",
region="us-central1",
type="BASIC",
event_publish_config=gcp.datafusion.InstanceEventPublishConfigArgs(
enabled=True,
topic=event_topic.id,
))
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const eventTopic = new gcp.pubsub.Topic("eventTopic", {});
const eventInstance = new gcp.datafusion.Instance("eventInstance", {
region: "us-central1",
type: "BASIC",
eventPublishConfig: {
enabled: true,
topic: eventTopic.id,
},
});
resources:
eventInstance:
type: gcp:datafusion:Instance
properties:
region: us-central1
type: BASIC
eventPublishConfig:
enabled: true
topic: ${eventTopic.id}
eventTopic:
type: gcp:pubsub:Topic
Data Fusion Instance Zone
using System.Collections.Generic;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var zone = new Gcp.DataFusion.Instance("zone", new()
{
Region = "us-central1",
Type = "DEVELOPER",
Zone = "us-central1-a",
});
});
package main
import (
"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/datafusion"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := datafusion.NewInstance(ctx, "zone", &datafusion.InstanceArgs{
Region: pulumi.String("us-central1"),
Type: pulumi.String("DEVELOPER"),
Zone: pulumi.String("us-central1-a"),
})
if err != nil {
return err
}
return nil
})
}
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.datafusion.Instance;
import com.pulumi.gcp.datafusion.InstanceArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var zone = new Instance("zone", InstanceArgs.builder()
.region("us-central1")
.type("DEVELOPER")
.zone("us-central1-a")
.build());
}
}
import pulumi
import pulumi_gcp as gcp
zone = gcp.datafusion.Instance("zone",
region="us-central1",
type="DEVELOPER",
zone="us-central1-a")
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const zone = new gcp.datafusion.Instance("zone", {
region: "us-central1",
type: "DEVELOPER",
zone: "us-central1-a",
});
resources:
zone:
type: gcp:datafusion:Instance
properties:
region: us-central1
type: DEVELOPER
zone: us-central1-a
Create Instance Resource
new Instance(name: string, args: InstanceArgs, opts?: CustomResourceOptions);
@overload
def Instance(resource_name: str,
opts: Optional[ResourceOptions] = None,
crypto_key_config: Optional[InstanceCryptoKeyConfigArgs] = None,
dataproc_service_account: Optional[str] = None,
description: Optional[str] = None,
display_name: Optional[str] = None,
enable_rbac: Optional[bool] = None,
enable_stackdriver_logging: Optional[bool] = None,
enable_stackdriver_monitoring: Optional[bool] = None,
event_publish_config: Optional[InstanceEventPublishConfigArgs] = None,
labels: Optional[Mapping[str, str]] = None,
name: Optional[str] = None,
network_config: Optional[InstanceNetworkConfigArgs] = None,
options: Optional[Mapping[str, str]] = None,
private_instance: Optional[bool] = None,
project: Optional[str] = None,
region: Optional[str] = None,
type: Optional[str] = None,
version: Optional[str] = None,
zone: Optional[str] = None)
@overload
def Instance(resource_name: str,
args: InstanceArgs,
opts: Optional[ResourceOptions] = None)
func NewInstance(ctx *Context, name string, args InstanceArgs, opts ...ResourceOption) (*Instance, error)
public Instance(string name, InstanceArgs args, CustomResourceOptions? opts = null)
public Instance(String name, InstanceArgs args)
public Instance(String name, InstanceArgs args, CustomResourceOptions options)
type: gcp:datafusion:Instance
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args InstanceArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args InstanceArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args InstanceArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args InstanceArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args InstanceArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Instance Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
The Instance resource accepts the following input properties:
- Type string
Represents the type of Data Fusion instance. Each type is configured with the default settings for processing and memory.
- BASIC: Basic Data Fusion instance. In Basic type, the user will be able to create data pipelines using point and click UI. However, there are certain limitations, such as fewer number of concurrent pipelines, no support for streaming pipelines, etc.
- ENTERPRISE: Enterprise Data Fusion instance. In Enterprise type, the user will have more features available, such as support for streaming pipelines, higher number of concurrent pipelines, etc.
- DEVELOPER: Developer Data Fusion instance. In Developer type, the user will have all features available but
with restrictive capabilities. This is to help enterprises design and develop their data ingestion and integration
pipelines at low cost.
Possible values are BASIC, ENTERPRISE, and DEVELOPER.
- CryptoKeyConfig InstanceCryptoKeyConfigArgs
The crypto key configuration. This field is used by the Customer-Managed Encryption Keys (CMEK) feature. Structure is documented below.
- DataprocServiceAccount string
User-managed service account to set on Dataproc when Cloud Data Fusion creates Dataproc to run data processing pipelines.
- Description string
An optional description of the instance.
- DisplayName string
Display name for an instance.
- EnableRbac bool
Option to enable granular role-based access control.
- EnableStackdriverLogging bool
Option to enable Stackdriver Logging.
- EnableStackdriverMonitoring bool
Option to enable Stackdriver Monitoring.
- EventPublishConfig InstanceEventPublishConfigArgs
Option to enable and pass metadata for event publishing. Structure is documented below.
- Labels Dictionary<string, string>
The resource labels for instance to use to annotate any related underlying resources, such as Compute Engine VMs.
- Name string
The ID of the instance or a fully qualified identifier for the instance.
- NetworkConfig InstanceNetworkConfigArgs
Network configuration options. These are required when a private Data Fusion instance is to be created. Structure is documented below.
- Options Dictionary<string, string>
Map of additional options used to configure the behavior of Data Fusion instance.
- PrivateInstance bool
Specifies whether the Data Fusion instance should be private. If set to true, all Data Fusion nodes will have private IP addresses and will not be able to access the public internet.
- Project string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- Region string
The region of the Data Fusion instance.
- Version string
Current version of the Data Fusion.
- Zone string
Name of the zone in which the Data Fusion instance will be created. Only DEVELOPER instances use this field.
- Type string
Represents the type of Data Fusion instance. Each type is configured with the default settings for processing and memory.
- BASIC: Basic Data Fusion instance. In Basic type, the user will be able to create data pipelines using point and click UI. However, there are certain limitations, such as fewer number of concurrent pipelines, no support for streaming pipelines, etc.
- ENTERPRISE: Enterprise Data Fusion instance. In Enterprise type, the user will have more features available, such as support for streaming pipelines, higher number of concurrent pipelines, etc.
- DEVELOPER: Developer Data Fusion instance. In Developer type, the user will have all features available but
with restrictive capabilities. This is to help enterprises design and develop their data ingestion and integration
pipelines at low cost.
Possible values are BASIC, ENTERPRISE, and DEVELOPER.
- CryptoKeyConfig InstanceCryptoKeyConfigArgs
The crypto key configuration. This field is used by the Customer-Managed Encryption Keys (CMEK) feature. Structure is documented below.
- DataprocServiceAccount string
User-managed service account to set on Dataproc when Cloud Data Fusion creates Dataproc to run data processing pipelines.
- Description string
An optional description of the instance.
- DisplayName string
Display name for an instance.
- EnableRbac bool
Option to enable granular role-based access control.
- EnableStackdriverLogging bool
Option to enable Stackdriver Logging.
- EnableStackdriverMonitoring bool
Option to enable Stackdriver Monitoring.
- EventPublishConfig InstanceEventPublishConfigArgs
Option to enable and pass metadata for event publishing. Structure is documented below.
- Labels map[string]string
The resource labels for instance to use to annotate any related underlying resources, such as Compute Engine VMs.
- Name string
The ID of the instance or a fully qualified identifier for the instance.
- NetworkConfig InstanceNetworkConfigArgs
Network configuration options. These are required when a private Data Fusion instance is to be created. Structure is documented below.
- Options map[string]string
Map of additional options used to configure the behavior of Data Fusion instance.
- PrivateInstance bool
Specifies whether the Data Fusion instance should be private. If set to true, all Data Fusion nodes will have private IP addresses and will not be able to access the public internet.
- Project string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- Region string
The region of the Data Fusion instance.
- Version string
Current version of the Data Fusion.
- Zone string
Name of the zone in which the Data Fusion instance will be created. Only DEVELOPER instances use this field.
- type String
Represents the type of Data Fusion instance. Each type is configured with the default settings for processing and memory.
- BASIC: Basic Data Fusion instance. In Basic type, the user will be able to create data pipelines using point and click UI. However, there are certain limitations, such as fewer number of concurrent pipelines, no support for streaming pipelines, etc.
- ENTERPRISE: Enterprise Data Fusion instance. In Enterprise type, the user will have more features available, such as support for streaming pipelines, higher number of concurrent pipelines, etc.
- DEVELOPER: Developer Data Fusion instance. In Developer type, the user will have all features available but
with restrictive capabilities. This is to help enterprises design and develop their data ingestion and integration
pipelines at low cost.
Possible values are BASIC, ENTERPRISE, and DEVELOPER.
- cryptoKeyConfig InstanceCryptoKeyConfigArgs
The crypto key configuration. This field is used by the Customer-Managed Encryption Keys (CMEK) feature. Structure is documented below.
- dataprocServiceAccount String
User-managed service account to set on Dataproc when Cloud Data Fusion creates Dataproc to run data processing pipelines.
- description String
An optional description of the instance.
- displayName String
Display name for an instance.
- enableRbac Boolean
Option to enable granular role-based access control.
- enableStackdriverLogging Boolean
Option to enable Stackdriver Logging.
- enableStackdriverMonitoring Boolean
Option to enable Stackdriver Monitoring.
- eventPublishConfig InstanceEventPublishConfigArgs
Option to enable and pass metadata for event publishing. Structure is documented below.
- labels Map<String,String>
The resource labels for instance to use to annotate any related underlying resources, such as Compute Engine VMs.
- name String
The ID of the instance or a fully qualified identifier for the instance.
- networkConfig InstanceNetworkConfigArgs
Network configuration options. These are required when a private Data Fusion instance is to be created. Structure is documented below.
- options Map<String,String>
Map of additional options used to configure the behavior of Data Fusion instance.
- privateInstance Boolean
Specifies whether the Data Fusion instance should be private. If set to true, all Data Fusion nodes will have private IP addresses and will not be able to access the public internet.
- project String
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- region String
The region of the Data Fusion instance.
- version String
Current version of the Data Fusion.
- zone String
Name of the zone in which the Data Fusion instance will be created. Only DEVELOPER instances use this field.
- type string
Represents the type of Data Fusion instance. Each type is configured with the default settings for processing and memory.
- BASIC: Basic Data Fusion instance. In Basic type, the user will be able to create data pipelines using point and click UI. However, there are certain limitations, such as fewer number of concurrent pipelines, no support for streaming pipelines, etc.
- ENTERPRISE: Enterprise Data Fusion instance. In Enterprise type, the user will have more features available, such as support for streaming pipelines, higher number of concurrent pipelines, etc.
- DEVELOPER: Developer Data Fusion instance. In Developer type, the user will have all features available but
with restrictive capabilities. This is to help enterprises design and develop their data ingestion and integration
pipelines at low cost.
Possible values are BASIC, ENTERPRISE, and DEVELOPER.
- cryptoKeyConfig InstanceCryptoKeyConfigArgs
The crypto key configuration. This field is used by the Customer-Managed Encryption Keys (CMEK) feature. Structure is documented below.
- dataprocServiceAccount string
User-managed service account to set on Dataproc when Cloud Data Fusion creates Dataproc to run data processing pipelines.
- description string
An optional description of the instance.
- displayName string
Display name for an instance.
- enableRbac boolean
Option to enable granular role-based access control.
- enableStackdriverLogging boolean
Option to enable Stackdriver Logging.
- enableStackdriverMonitoring boolean
Option to enable Stackdriver Monitoring.
- eventPublishConfig InstanceEventPublishConfigArgs
Option to enable and pass metadata for event publishing. Structure is documented below.
- labels {[key: string]: string}
The resource labels for instance to use to annotate any related underlying resources, such as Compute Engine VMs.
- name string
The ID of the instance or a fully qualified identifier for the instance.
- networkConfig InstanceNetworkConfigArgs
Network configuration options. These are required when a private Data Fusion instance is to be created. Structure is documented below.
- options {[key: string]: string}
Map of additional options used to configure the behavior of Data Fusion instance.
- privateInstance boolean
Specifies whether the Data Fusion instance should be private. If set to true, all Data Fusion nodes will have private IP addresses and will not be able to access the public internet.
- project string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- region string
The region of the Data Fusion instance.
- version string
Current version of the Data Fusion.
- zone string
Name of the zone in which the Data Fusion instance will be created. Only DEVELOPER instances use this field.
- type str
Represents the type of Data Fusion instance. Each type is configured with the default settings for processing and memory.
- BASIC: Basic Data Fusion instance. In Basic type, the user will be able to create data pipelines using point and click UI. However, there are certain limitations, such as fewer number of concurrent pipelines, no support for streaming pipelines, etc.
- ENTERPRISE: Enterprise Data Fusion instance. In Enterprise type, the user will have more features available, such as support for streaming pipelines, higher number of concurrent pipelines, etc.
- DEVELOPER: Developer Data Fusion instance. In Developer type, the user will have all features available but
with restrictive capabilities. This is to help enterprises design and develop their data ingestion and integration
pipelines at low cost.
Possible values are BASIC, ENTERPRISE, and DEVELOPER.
- crypto_key_config InstanceCryptoKeyConfigArgs
The crypto key configuration. This field is used by the Customer-Managed Encryption Keys (CMEK) feature. Structure is documented below.
- dataproc_service_account str
User-managed service account to set on Dataproc when Cloud Data Fusion creates Dataproc to run data processing pipelines.
- description str
An optional description of the instance.
- display_name str
Display name for an instance.
- enable_rbac bool
Option to enable granular role-based access control.
- enable_stackdriver_logging bool
Option to enable Stackdriver Logging.
- enable_stackdriver_monitoring bool
Option to enable Stackdriver Monitoring.
- event_publish_config InstanceEventPublishConfigArgs
Option to enable and pass metadata for event publishing. Structure is documented below.
- labels Mapping[str, str]
The resource labels for instance to use to annotate any related underlying resources, such as Compute Engine VMs.
- name str
The ID of the instance or a fully qualified identifier for the instance.
- network_config InstanceNetworkConfigArgs
Network configuration options. These are required when a private Data Fusion instance is to be created. Structure is documented below.
- options Mapping[str, str]
Map of additional options used to configure the behavior of Data Fusion instance.
- private_instance bool
Specifies whether the Data Fusion instance should be private. If set to true, all Data Fusion nodes will have private IP addresses and will not be able to access the public internet.
- project str
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- region str
The region of the Data Fusion instance.
- version str
Current version of the Data Fusion.
- zone str
Name of the zone in which the Data Fusion instance will be created. Only DEVELOPER instances use this field.
- type String
Represents the type of Data Fusion instance. Each type is configured with the default settings for processing and memory.
- BASIC: Basic Data Fusion instance. In Basic type, the user will be able to create data pipelines using point and click UI. However, there are certain limitations, such as fewer number of concurrent pipelines, no support for streaming pipelines, etc.
- ENTERPRISE: Enterprise Data Fusion instance. In Enterprise type, the user will have more features available, such as support for streaming pipelines, higher number of concurrent pipelines, etc.
- DEVELOPER: Developer Data Fusion instance. In Developer type, the user will have all features available but
with restrictive capabilities. This is to help enterprises design and develop their data ingestion and integration
pipelines at low cost.
Possible values are BASIC, ENTERPRISE, and DEVELOPER.
- cryptoKeyConfig Property Map
The crypto key configuration. This field is used by the Customer-Managed Encryption Keys (CMEK) feature. Structure is documented below.
- dataprocServiceAccount String
User-managed service account to set on Dataproc when Cloud Data Fusion creates Dataproc to run data processing pipelines.
- description String
An optional description of the instance.
- displayName String
Display name for an instance.
- enableRbac Boolean
Option to enable granular role-based access control.
- enableStackdriverLogging Boolean
Option to enable Stackdriver Logging.
- enableStackdriverMonitoring Boolean
Option to enable Stackdriver Monitoring.
- eventPublishConfig Property Map
Option to enable and pass metadata for event publishing. Structure is documented below.
- labels Map<String>
The resource labels for instance to use to annotate any related underlying resources, such as Compute Engine VMs.
- name String
The ID of the instance or a fully qualified identifier for the instance.
- networkConfig Property Map
Network configuration options. These are required when a private Data Fusion instance is to be created. Structure is documented below.
- options Map<String>
Map of additional options used to configure the behavior of Data Fusion instance.
- privateInstance Boolean
Specifies whether the Data Fusion instance should be private. If set to true, all Data Fusion nodes will have private IP addresses and will not be able to access the public internet.
- project String
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- region String
The region of the Data Fusion instance.
- version String
Current version of the Data Fusion.
- zone String
Name of the zone in which the Data Fusion instance will be created. Only DEVELOPER instances use this field.
Outputs
All input properties are implicitly available as output properties. Additionally, the Instance resource produces the following output properties:
- ApiEndpoint string
Endpoint on which the REST APIs are accessible.
- CreateTime string
The time the instance was created in RFC3339 UTC "Zulu" format, accurate to nanoseconds.
- GcsBucket string
Cloud Storage bucket generated by Data Fusion in the customer project.
- Id string
The provider-assigned unique ID for this managed resource.
- P4ServiceAccount string
P4 service account for the customer project.
- ServiceAccount string
Deprecated. Use tenant_project_id instead to extract the tenant project ID.
- ServiceEndpoint string
Endpoint on which the Data Fusion UI and REST APIs are accessible.
- State string
The current state of this Data Fusion instance.
- CREATING: Instance is being created
- RUNNING: Instance is running and ready for requests
- FAILED: Instance creation failed
- DELETING: Instance is being deleted
- UPGRADING: Instance is being upgraded
- RESTARTING: Instance is being restarted
- StateMessage string
Additional information about the current state of this Data Fusion instance if available.
- TenantProjectId string
The name of the tenant project.
- UpdateTime string
The time the instance was last updated in RFC3339 UTC "Zulu" format, accurate to nanoseconds.
- ApiEndpoint string
Endpoint on which the REST APIs are accessible.
- CreateTime string
The time the instance was created in RFC3339 UTC "Zulu" format, accurate to nanoseconds.
- GcsBucket string
Cloud Storage bucket generated by Data Fusion in the customer project.
- Id string
The provider-assigned unique ID for this managed resource.
- P4ServiceAccount string
P4 service account for the customer project.
- ServiceAccount string
Deprecated. Use tenant_project_id instead to extract the tenant project ID.
- ServiceEndpoint string
Endpoint on which the Data Fusion UI and REST APIs are accessible.
- State string
The current state of this Data Fusion instance.
- CREATING: Instance is being created
- RUNNING: Instance is running and ready for requests
- FAILED: Instance creation failed
- DELETING: Instance is being deleted
- UPGRADING: Instance is being upgraded
- RESTARTING: Instance is being restarted
- StateMessage string
Additional information about the current state of this Data Fusion instance if available.
- TenantProjectId string
The name of the tenant project.
- UpdateTime string
The time the instance was last updated in RFC3339 UTC "Zulu" format, accurate to nanoseconds.
- apiEndpoint String
Endpoint on which the REST APIs are accessible.
- createTime String
The time the instance was created in RFC3339 UTC "Zulu" format, accurate to nanoseconds.
- gcsBucket String
Cloud Storage bucket generated by Data Fusion in the customer project.
- id String
The provider-assigned unique ID for this managed resource.
- p4ServiceAccount String
P4 service account for the customer project.
- serviceAccount String
Deprecated. Use tenant_project_id instead to extract the tenant project ID.
- serviceEndpoint String
Endpoint on which the Data Fusion UI and REST APIs are accessible.
- state String
The current state of this Data Fusion instance.
- CREATING: Instance is being created
- RUNNING: Instance is running and ready for requests
- FAILED: Instance creation failed
- DELETING: Instance is being deleted
- UPGRADING: Instance is being upgraded
- RESTARTING: Instance is being restarted
- stateMessage String
Additional information about the current state of this Data Fusion instance if available.
- tenantProjectId String
The name of the tenant project.
- updateTime String
The time the instance was last updated in RFC3339 UTC "Zulu" format, accurate to nanoseconds.
- apiEndpoint string
Endpoint on which the REST APIs are accessible.
- createTime string
The time the instance was created in RFC3339 UTC "Zulu" format, accurate to nanoseconds.
- gcsBucket string
Cloud Storage bucket generated by Data Fusion in the customer project.
- id string
The provider-assigned unique ID for this managed resource.
- p4ServiceAccount string
P4 service account for the customer project.
- serviceAccount string
Deprecated. Use tenant_project_id instead to extract the tenant project ID.
- serviceEndpoint string
Endpoint on which the Data Fusion UI and REST APIs are accessible.
- state string
The current state of this Data Fusion instance.
- CREATING: Instance is being created
- RUNNING: Instance is running and ready for requests
- FAILED: Instance creation failed
- DELETING: Instance is being deleted
- UPGRADING: Instance is being upgraded
- RESTARTING: Instance is being restarted
- stateMessage string
Additional information about the current state of this Data Fusion instance if available.
- tenantProjectId string
The name of the tenant project.
- updateTime string
The time the instance was last updated in RFC3339 UTC "Zulu" format, accurate to nanoseconds.
- api_endpoint str
Endpoint on which the REST APIs are accessible.
- create_time str
The time the instance was created in RFC3339 UTC "Zulu" format, accurate to nanoseconds.
- gcs_bucket str
Cloud Storage bucket generated by Data Fusion in the customer project.
- id str
The provider-assigned unique ID for this managed resource.
- p4_service_account str
P4 service account for the customer project.
- service_account str
Deprecated. Use tenant_project_id instead to extract the tenant project ID.
- service_endpoint str
Endpoint on which the Data Fusion UI and REST APIs are accessible.
- state str
The current state of this Data Fusion instance.
- CREATING: Instance is being created
- RUNNING: Instance is running and ready for requests
- FAILED: Instance creation failed
- DELETING: Instance is being deleted
- UPGRADING: Instance is being upgraded
- RESTARTING: Instance is being restarted
- state_message str
Additional information about the current state of this Data Fusion instance if available.
- tenant_project_id str
The name of the tenant project.
- update_time str
The time the instance was last updated in RFC3339 UTC "Zulu" format, accurate to nanoseconds.
- apiEndpoint String
Endpoint on which the REST APIs are accessible.
- createTime String
The time the instance was created in RFC3339 UTC "Zulu" format, accurate to nanoseconds.
- gcsBucket String
Cloud Storage bucket generated by Data Fusion in the customer project.
- id String
The provider-assigned unique ID for this managed resource.
- p4ServiceAccount String
P4 service account for the customer project.
- serviceAccount String
Deprecated. Use tenant_project_id instead to extract the tenant project ID.
- serviceEndpoint String
Endpoint on which the Data Fusion UI and REST APIs are accessible.
- state String
The current state of this Data Fusion instance.
- CREATING: Instance is being created
- RUNNING: Instance is running and ready for requests
- FAILED: Instance creation failed
- DELETING: Instance is being deleted
- UPGRADING: Instance is being upgraded
- RESTARTING: Instance is being restarted
- stateMessage String
Additional information about the current state of this Data Fusion instance if available.
- tenantProjectId String
The name of the tenant project.
- updateTime String
The time the instance was last updated in RFC3339 UTC "Zulu" format, accurate to nanoseconds.
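All of these output properties can be read from the resource object like any other Pulumi output. The following TypeScript sketch is illustrative only (the instance name and arguments are made up for the example) and simply exports a few of them:

import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";

// Illustrative instance; any gcp.datafusion.Instance exposes the same outputs.
const example = new gcp.datafusion.Instance("example", {
    region: "us-central1",
    type: "BASIC",
});

// Output properties such as apiEndpoint, serviceEndpoint, and state become
// available once the instance has been created.
export const apiEndpoint = example.apiEndpoint;
export const serviceEndpoint = example.serviceEndpoint;
export const instanceState = example.state;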
Look up Existing Instance Resource
Get an existing Instance resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: InstanceState, opts?: CustomResourceOptions): Instance
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
api_endpoint: Optional[str] = None,
create_time: Optional[str] = None,
crypto_key_config: Optional[InstanceCryptoKeyConfigArgs] = None,
dataproc_service_account: Optional[str] = None,
description: Optional[str] = None,
display_name: Optional[str] = None,
enable_rbac: Optional[bool] = None,
enable_stackdriver_logging: Optional[bool] = None,
enable_stackdriver_monitoring: Optional[bool] = None,
event_publish_config: Optional[InstanceEventPublishConfigArgs] = None,
gcs_bucket: Optional[str] = None,
labels: Optional[Mapping[str, str]] = None,
name: Optional[str] = None,
network_config: Optional[InstanceNetworkConfigArgs] = None,
options: Optional[Mapping[str, str]] = None,
p4_service_account: Optional[str] = None,
private_instance: Optional[bool] = None,
project: Optional[str] = None,
region: Optional[str] = None,
service_account: Optional[str] = None,
service_endpoint: Optional[str] = None,
state: Optional[str] = None,
state_message: Optional[str] = None,
tenant_project_id: Optional[str] = None,
type: Optional[str] = None,
update_time: Optional[str] = None,
version: Optional[str] = None,
zone: Optional[str] = None) -> Instance
func GetInstance(ctx *Context, name string, id IDInput, state *InstanceState, opts ...ResourceOption) (*Instance, error)
public static Instance Get(string name, Input<string> id, InstanceState? state, CustomResourceOptions? opts = null)
public static Instance get(String name, Output<String> id, InstanceState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
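As a minimal TypeScript sketch of such a lookup, the get method can be called with the name to assign and the existing resource's ID. The ID format shown below (projects/{project}/locations/{region}/instances/{name}) and all names are assumptions made for illustration; substitute the real ID of the instance you want to reference:

import * as gcp from "@pulumi/gcp";

// Look up an instance that already exists (for example, one created outside this stack).
// The ID string is a placeholder; its exact format is an assumption for this sketch.
const existing = gcp.datafusion.Instance.get(
    "existing-instance",
    "projects/my-project/locations/us-central1/instances/my-instance");

// State read back from the provider is available as outputs on the returned object.
export const existingApiEndpoint = existing.apiEndpoint;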
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- ApiEndpoint string
Endpoint on which the REST APIs are accessible.
- CreateTime string
The time the instance was created in RFC3339 UTC "Zulu" format, accurate to nanoseconds.
- CryptoKeyConfig InstanceCryptoKeyConfigArgs
The crypto key configuration. This field is used by the Customer-Managed Encryption Keys (CMEK) feature. Structure is documented below.
- DataprocServiceAccount string
User-managed service account to set on Dataproc when Cloud Data Fusion creates Dataproc to run data processing pipelines.
- Description string
An optional description of the instance.
- DisplayName string
Display name for an instance.
- EnableRbac bool
Option to enable granular role-based access control.
- EnableStackdriverLogging bool
Option to enable Stackdriver Logging.
- EnableStackdriverMonitoring bool
Option to enable Stackdriver Monitoring.
- EventPublishConfig InstanceEventPublishConfigArgs
Option to enable and pass metadata for event publishing. Structure is documented below.
- GcsBucket string
Cloud Storage bucket generated by Data Fusion in the customer project.
- Labels Dictionary<string, string>
The resource labels for instance to use to annotate any related underlying resources, such as Compute Engine VMs.
- Name string
The ID of the instance or a fully qualified identifier for the instance.
- NetworkConfig InstanceNetworkConfigArgs
Network configuration options. These are required when a private Data Fusion instance is to be created. Structure is documented below.
- Options Dictionary<string, string>
Map of additional options used to configure the behavior of Data Fusion instance.
- P4ServiceAccount string
P4 service account for the customer project.
- PrivateInstance bool
Specifies whether the Data Fusion instance should be private. If set to true, all Data Fusion nodes will have private IP addresses and will not be able to access the public internet.
- Project string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- Region string
The region of the Data Fusion instance.
- ServiceAccount string
Deprecated. Use tenant_project_id instead to extract the tenant project ID.
- ServiceEndpoint string
Endpoint on which the Data Fusion UI and REST APIs are accessible.
- State string
The current state of this Data Fusion instance.
- CREATING: Instance is being created
- RUNNING: Instance is running and ready for requests
- FAILED: Instance creation failed
- DELETING: Instance is being deleted
- UPGRADING: Instance is being upgraded
- RESTARTING: Instance is being restarted
- StateMessage string
Additional information about the current state of this Data Fusion instance if available.
- TenantProjectId string
The name of the tenant project.
- Type string
Represents the type of Data Fusion instance. Each type is configured with the default settings for processing and memory.
- BASIC: Basic Data Fusion instance. In Basic type, the user will be able to create data pipelines using point and click UI. However, there are certain limitations, such as fewer number of concurrent pipelines, no support for streaming pipelines, etc.
- ENTERPRISE: Enterprise Data Fusion instance. In Enterprise type, the user will have more features available, such as support for streaming pipelines, higher number of concurrent pipelines, etc.
- DEVELOPER: Developer Data Fusion instance. In Developer type, the user will have all features available but
with restrictive capabilities. This is to help enterprises design and develop their data ingestion and integration
pipelines at low cost.
Possible values are BASIC, ENTERPRISE, and DEVELOPER.
- UpdateTime string
The time the instance was last updated in RFC3339 UTC "Zulu" format, accurate to nanoseconds.
- Version string
Current version of the Data Fusion.
- Zone string
Name of the zone in which the Data Fusion instance will be created. Only DEVELOPER instances use this field.
- ApiEndpoint string
Endpoint on which the REST APIs are accessible.
- CreateTime string
The time the instance was created in RFC3339 UTC "Zulu" format, accurate to nanoseconds.
- CryptoKeyConfig InstanceCryptoKeyConfigArgs
The crypto key configuration. This field is used by the Customer-Managed Encryption Keys (CMEK) feature. Structure is documented below.
- DataprocServiceAccount string
User-managed service account to set on Dataproc when Cloud Data Fusion creates Dataproc to run data processing pipelines.
- Description string
An optional description of the instance.
- DisplayName string
Display name for an instance.
- EnableRbac bool
Option to enable granular role-based access control.
- EnableStackdriverLogging bool
Option to enable Stackdriver Logging.
- EnableStackdriverMonitoring bool
Option to enable Stackdriver Monitoring.
- EventPublishConfig InstanceEventPublishConfigArgs
Option to enable and pass metadata for event publishing. Structure is documented below.
- GcsBucket string
Cloud Storage bucket generated by Data Fusion in the customer project.
- Labels map[string]string
The resource labels for instance to use to annotate any related underlying resources, such as Compute Engine VMs.
- Name string
The ID of the instance or a fully qualified identifier for the instance.
- NetworkConfig InstanceNetworkConfigArgs
Network configuration options. These are required when a private Data Fusion instance is to be created. Structure is documented below.
- Options map[string]string
Map of additional options used to configure the behavior of Data Fusion instance.
- P4ServiceAccount string
P4 service account for the customer project.
- PrivateInstance bool
Specifies whether the Data Fusion instance should be private. If set to true, all Data Fusion nodes will have private IP addresses and will not be able to access the public internet.
- Project string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- Region string
The region of the Data Fusion instance.
- ServiceAccount string
Deprecated. Use tenant_project_id instead to extract the tenant project ID.
- ServiceEndpoint string
Endpoint on which the Data Fusion UI and REST APIs are accessible.
- State string
The current state of this Data Fusion instance.
- CREATING: Instance is being created
- RUNNING: Instance is running and ready for requests
- FAILED: Instance creation failed
- DELETING: Instance is being deleted
- UPGRADING: Instance is being upgraded
- RESTARTING: Instance is being restarted
- StateMessage string
Additional information about the current state of this Data Fusion instance if available.
- TenantProjectId string
The name of the tenant project.
- Type string
Represents the type of Data Fusion instance. Each type is configured with the default settings for processing and memory.
- BASIC: Basic Data Fusion instance. In Basic type, the user will be able to create data pipelines using point and click UI. However, there are certain limitations, such as fewer number of concurrent pipelines, no support for streaming pipelines, etc.
- ENTERPRISE: Enterprise Data Fusion instance. In Enterprise type, the user will have more features available, such as support for streaming pipelines, higher number of concurrent pipelines, etc.
- DEVELOPER: Developer Data Fusion instance. In Developer type, the user will have all features available but
with restrictive capabilities. This is to help enterprises design and develop their data ingestion and integration
pipelines at low cost.
Possible values are
BASIC
,ENTERPRISE
, andDEVELOPER
.
- Update
Time string The time the instance was last updated in RFC3339 UTC "Zulu" format, accurate to nanoseconds.
- Version string
Current version of the Data Fusion.
- Zone string
Name of the zone in which the Data Fusion instance will be created. Only DEVELOPER instances use this field.
- apiEndpoint String
Endpoint on which the REST APIs are accessible.
- createTime String
The time the instance was created, in RFC3339 UTC "Zulu" format, accurate to nanoseconds.
- cryptoKeyConfig InstanceCryptoKeyConfigArgs
The crypto key configuration. This field is used by the Customer-Managed Encryption Keys (CMEK) feature. Structure is documented below.
- dataprocServiceAccount String
User-managed service account to set on Dataproc when Cloud Data Fusion creates Dataproc to run data processing pipelines.
- description String
An optional description of the instance.
- displayName String
Display name for an instance.
- enableRbac Boolean
Option to enable granular role-based access control.
- enableStackdriverLogging Boolean
Option to enable Stackdriver Logging.
- enableStackdriverMonitoring Boolean
Option to enable Stackdriver Monitoring.
- eventPublishConfig InstanceEventPublishConfigArgs
Option to enable and pass metadata for event publishing. Structure is documented below.
- gcsBucket String
Cloud Storage bucket generated by Data Fusion in the customer project.
- labels Map<String,String>
The resource labels for the instance, used to annotate any related underlying resources, such as Compute Engine VMs.
- name String
The ID of the instance or a fully qualified identifier for the instance.
- networkConfig InstanceNetworkConfigArgs
Network configuration options. These are required when a private Data Fusion instance is to be created. Structure is documented below.
- options Map<String,String>
Map of additional options used to configure the behavior of the Data Fusion instance.
- p4ServiceAccount String
P4 service account for the customer project.
- privateInstance Boolean
Specifies whether the Data Fusion instance should be private. If set to true, all Data Fusion nodes will have private IP addresses and will not be able to access the public internet.
- project String
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- region String
The region of the Data Fusion instance.
- serviceAccount String
Deprecated. Use tenant_project_id instead to extract the tenant project ID.
- serviceEndpoint String
Endpoint on which the Data Fusion UI and REST APIs are accessible.
- state String
The current state of this Data Fusion instance.
- CREATING: Instance is being created
- RUNNING: Instance is running and ready for requests
- FAILED: Instance creation failed
- DELETING: Instance is being deleted
- UPGRADING: Instance is being upgraded
- RESTARTING: Instance is being restarted
- stateMessage String
Additional information about the current state of this Data Fusion instance, if available.
- tenantProjectId String
The name of the tenant project.
- type String
Represents the type of Data Fusion instance. Each type is configured with the default settings for processing and memory.
- BASIC: Basic Data Fusion instance. In Basic type, the user will be able to create data pipelines using a point-and-click UI. However, there are certain limitations, such as a lower number of concurrent pipelines and no support for streaming pipelines.
- ENTERPRISE: Enterprise Data Fusion instance. In Enterprise type, the user will have more features available, such as support for streaming pipelines and a higher number of concurrent pipelines.
- DEVELOPER: Developer Data Fusion instance. In Developer type, the user will have all features available but with restrictive capabilities. This is to help enterprises design and develop their data ingestion and integration pipelines at low cost.
Possible values are BASIC, ENTERPRISE, and DEVELOPER.
- updateTime String
The time the instance was last updated, in RFC3339 UTC "Zulu" format, accurate to nanoseconds.
- version String
Current version of Data Fusion.
- zone String
Name of the zone in which the Data Fusion instance will be created. Only DEVELOPER instances use this field.
- apiEndpoint string
Endpoint on which the REST APIs are accessible.
- createTime string
The time the instance was created, in RFC3339 UTC "Zulu" format, accurate to nanoseconds.
- cryptoKeyConfig InstanceCryptoKeyConfigArgs
The crypto key configuration. This field is used by the Customer-Managed Encryption Keys (CMEK) feature. Structure is documented below.
- dataprocServiceAccount string
User-managed service account to set on Dataproc when Cloud Data Fusion creates Dataproc to run data processing pipelines.
- description string
An optional description of the instance.
- displayName string
Display name for an instance.
- enableRbac boolean
Option to enable granular role-based access control.
- enableStackdriverLogging boolean
Option to enable Stackdriver Logging.
- enableStackdriverMonitoring boolean
Option to enable Stackdriver Monitoring.
- eventPublishConfig InstanceEventPublishConfigArgs
Option to enable and pass metadata for event publishing. Structure is documented below.
- gcsBucket string
Cloud Storage bucket generated by Data Fusion in the customer project.
- labels {[key: string]: string}
The resource labels for the instance, used to annotate any related underlying resources, such as Compute Engine VMs.
- name string
The ID of the instance or a fully qualified identifier for the instance.
- networkConfig InstanceNetworkConfigArgs
Network configuration options. These are required when a private Data Fusion instance is to be created. Structure is documented below.
- options {[key: string]: string}
Map of additional options used to configure the behavior of the Data Fusion instance.
- p4ServiceAccount string
P4 service account for the customer project.
- privateInstance boolean
Specifies whether the Data Fusion instance should be private. If set to true, all Data Fusion nodes will have private IP addresses and will not be able to access the public internet.
- project string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- region string
The region of the Data Fusion instance.
- serviceAccount string
Deprecated. Use tenant_project_id instead to extract the tenant project ID.
- serviceEndpoint string
Endpoint on which the Data Fusion UI and REST APIs are accessible.
- state string
The current state of this Data Fusion instance.
- CREATING: Instance is being created
- RUNNING: Instance is running and ready for requests
- FAILED: Instance creation failed
- DELETING: Instance is being deleted
- UPGRADING: Instance is being upgraded
- RESTARTING: Instance is being restarted
- stateMessage string
Additional information about the current state of this Data Fusion instance, if available.
- tenantProjectId string
The name of the tenant project.
- type string
Represents the type of Data Fusion instance. Each type is configured with the default settings for processing and memory.
- BASIC: Basic Data Fusion instance. In Basic type, the user will be able to create data pipelines using a point-and-click UI. However, there are certain limitations, such as a lower number of concurrent pipelines and no support for streaming pipelines.
- ENTERPRISE: Enterprise Data Fusion instance. In Enterprise type, the user will have more features available, such as support for streaming pipelines and a higher number of concurrent pipelines.
- DEVELOPER: Developer Data Fusion instance. In Developer type, the user will have all features available but with restrictive capabilities. This is to help enterprises design and develop their data ingestion and integration pipelines at low cost.
Possible values are BASIC, ENTERPRISE, and DEVELOPER.
- updateTime string
The time the instance was last updated, in RFC3339 UTC "Zulu" format, accurate to nanoseconds.
- version string
Current version of Data Fusion.
- zone string
Name of the zone in which the Data Fusion instance will be created. Only DEVELOPER instances use this field.
- api_endpoint str
Endpoint on which the REST APIs are accessible.
- create_time str
The time the instance was created, in RFC3339 UTC "Zulu" format, accurate to nanoseconds.
- crypto_key_config InstanceCryptoKeyConfigArgs
The crypto key configuration. This field is used by the Customer-Managed Encryption Keys (CMEK) feature. Structure is documented below.
- dataproc_service_account str
User-managed service account to set on Dataproc when Cloud Data Fusion creates Dataproc to run data processing pipelines.
- description str
An optional description of the instance.
- display_name str
Display name for an instance.
- enable_rbac bool
Option to enable granular role-based access control.
- enable_stackdriver_logging bool
Option to enable Stackdriver Logging.
- enable_stackdriver_monitoring bool
Option to enable Stackdriver Monitoring.
- event_publish_config InstanceEventPublishConfigArgs
Option to enable and pass metadata for event publishing. Structure is documented below.
- gcs_bucket str
Cloud Storage bucket generated by Data Fusion in the customer project.
- labels Mapping[str, str]
The resource labels for the instance, used to annotate any related underlying resources, such as Compute Engine VMs.
- name str
The ID of the instance or a fully qualified identifier for the instance.
- network_config InstanceNetworkConfigArgs
Network configuration options. These are required when a private Data Fusion instance is to be created. Structure is documented below.
- options Mapping[str, str]
Map of additional options used to configure the behavior of the Data Fusion instance.
- p4_service_account str
P4 service account for the customer project.
- private_instance bool
Specifies whether the Data Fusion instance should be private. If set to true, all Data Fusion nodes will have private IP addresses and will not be able to access the public internet.
- project str
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- region str
The region of the Data Fusion instance.
- service_account str
Deprecated. Use tenant_project_id instead to extract the tenant project ID.
- service_endpoint str
Endpoint on which the Data Fusion UI and REST APIs are accessible.
- state str
The current state of this Data Fusion instance.
- CREATING: Instance is being created
- RUNNING: Instance is running and ready for requests
- FAILED: Instance creation failed
- DELETING: Instance is being deleted
- UPGRADING: Instance is being upgraded
- RESTARTING: Instance is being restarted
- state_message str
Additional information about the current state of this Data Fusion instance, if available.
- tenant_project_id str
The name of the tenant project.
- type str
Represents the type of Data Fusion instance. Each type is configured with the default settings for processing and memory.
- BASIC: Basic Data Fusion instance. In Basic type, the user will be able to create data pipelines using a point-and-click UI. However, there are certain limitations, such as a lower number of concurrent pipelines and no support for streaming pipelines.
- ENTERPRISE: Enterprise Data Fusion instance. In Enterprise type, the user will have more features available, such as support for streaming pipelines and a higher number of concurrent pipelines.
- DEVELOPER: Developer Data Fusion instance. In Developer type, the user will have all features available but with restrictive capabilities. This is to help enterprises design and develop their data ingestion and integration pipelines at low cost.
Possible values are BASIC, ENTERPRISE, and DEVELOPER.
- update_time str
The time the instance was last updated, in RFC3339 UTC "Zulu" format, accurate to nanoseconds.
- version str
Current version of Data Fusion.
- zone str
Name of the zone in which the Data Fusion instance will be created. Only DEVELOPER instances use this field.
- apiEndpoint String
Endpoint on which the REST APIs are accessible.
- createTime String
The time the instance was created, in RFC3339 UTC "Zulu" format, accurate to nanoseconds.
- cryptoKeyConfig Property Map
The crypto key configuration. This field is used by the Customer-Managed Encryption Keys (CMEK) feature. Structure is documented below.
- dataprocServiceAccount String
User-managed service account to set on Dataproc when Cloud Data Fusion creates Dataproc to run data processing pipelines.
- description String
An optional description of the instance.
- displayName String
Display name for an instance.
- enableRbac Boolean
Option to enable granular role-based access control.
- enableStackdriverLogging Boolean
Option to enable Stackdriver Logging.
- enableStackdriverMonitoring Boolean
Option to enable Stackdriver Monitoring.
- eventPublishConfig Property Map
Option to enable and pass metadata for event publishing. Structure is documented below.
- gcsBucket String
Cloud Storage bucket generated by Data Fusion in the customer project.
- labels Map<String>
The resource labels for the instance, used to annotate any related underlying resources, such as Compute Engine VMs.
- name String
The ID of the instance or a fully qualified identifier for the instance.
- networkConfig Property Map
Network configuration options. These are required when a private Data Fusion instance is to be created. Structure is documented below.
- options Map<String>
Map of additional options used to configure the behavior of the Data Fusion instance.
- p4ServiceAccount String
P4 service account for the customer project.
- privateInstance Boolean
Specifies whether the Data Fusion instance should be private. If set to true, all Data Fusion nodes will have private IP addresses and will not be able to access the public internet.
- project String
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- region String
The region of the Data Fusion instance.
- serviceAccount String
Deprecated. Use tenant_project_id instead to extract the tenant project ID.
- serviceEndpoint String
Endpoint on which the Data Fusion UI and REST APIs are accessible.
- state String
The current state of this Data Fusion instance.
- CREATING: Instance is being created
- RUNNING: Instance is running and ready for requests
- FAILED: Instance creation failed
- DELETING: Instance is being deleted
- UPGRADING: Instance is being upgraded
- RESTARTING: Instance is being restarted
- stateMessage String
Additional information about the current state of this Data Fusion instance, if available.
- tenantProjectId String
The name of the tenant project.
- type String
Represents the type of Data Fusion instance. Each type is configured with the default settings for processing and memory.
- BASIC: Basic Data Fusion instance. In Basic type, the user will be able to create data pipelines using a point-and-click UI. However, there are certain limitations, such as a lower number of concurrent pipelines and no support for streaming pipelines.
- ENTERPRISE: Enterprise Data Fusion instance. In Enterprise type, the user will have more features available, such as support for streaming pipelines and a higher number of concurrent pipelines.
- DEVELOPER: Developer Data Fusion instance. In Developer type, the user will have all features available but with restrictive capabilities. This is to help enterprises design and develop their data ingestion and integration pipelines at low cost.
Possible values are BASIC, ENTERPRISE, and DEVELOPER.
- updateTime String
The time the instance was last updated, in RFC3339 UTC "Zulu" format, accurate to nanoseconds.
- version String
Current version of Data Fusion.
- zone String
Name of the zone in which the Data Fusion instance will be created. Only DEVELOPER instances use this field.
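As a rough illustration of how these output properties can be consumed, the TypeScript sketch below exports the endpoints and the tenant project ID (preferred over the deprecated serviceAccount output); the resource and export names are placeholders, not part of the reference above.
import * as gcp from "@pulumi/gcp";

const instance = new gcp.datafusion.Instance("instance", {
    region: "us-central1",
    type: "BASIC",
});

// Endpoints for the Data Fusion UI and REST APIs.
export const apiEndpoint = instance.apiEndpoint;
export const serviceEndpoint = instance.serviceEndpoint;

// Prefer tenantProjectId over the deprecated serviceAccount output.
export const tenantProject = instance.tenantProjectId;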
Supporting Types
InstanceCryptoKeyConfig
- KeyReference string
The name of the key which is used to encrypt/decrypt customer data. For a key in Cloud KMS, the key should be in the format projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}.
- KeyReference string
The name of the key which is used to encrypt/decrypt customer data. For a key in Cloud KMS, the key should be in the format projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}.
- keyReference String
The name of the key which is used to encrypt/decrypt customer data. For a key in Cloud KMS, the key should be in the format projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}.
- keyReference string
The name of the key which is used to encrypt/decrypt customer data. For a key in Cloud KMS, the key should be in the format projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}.
- key_reference str
The name of the key which is used to encrypt/decrypt customer data. For a key in Cloud KMS, the key should be in the format projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}.
- keyReference String
The name of the key which is used to encrypt/decrypt customer data. For a key in Cloud KMS, the key should be in the format projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}.
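A minimal CMEK sketch in TypeScript, assuming an existing Cloud KMS key (the project, key ring, and key names below are placeholders):
import * as gcp from "@pulumi/gcp";

// Hypothetical CMEK-enabled instance referencing an existing Cloud KMS key.
const cmekInstance = new gcp.datafusion.Instance("cmekInstance", {
    region: "us-central1",
    type: "BASIC",
    cryptoKeyConfig: {
        keyReference: "projects/my-project/locations/us-central1/keyRings/my-ring/cryptoKeys/my-key",
    },
});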
InstanceEventPublishConfig
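The field table for this type is not reproduced here. Based on the provider schema, it is expected to take an enabled flag and a Pub/Sub topic; the TypeScript sketch below is an assumption to verify against the API reference, and the resource names are placeholders.
import * as gcp from "@pulumi/gcp";

// Assumed fields: `enabled` and `topic`; verify against the current provider schema.
const eventsTopic = new gcp.pubsub.Topic("eventsTopic");

const eventedInstance = new gcp.datafusion.Instance("eventedInstance", {
    region: "us-central1",
    type: "BASIC",
    eventPublishConfig: {
        enabled: true,
        topic: eventsTopic.id,
    },
});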
InstanceNetworkConfig
- IpAllocation string
The IP range in CIDR notation to use for the managed Data Fusion instance nodes. This range must not overlap with any other ranges used in the Data Fusion instance network.
- Network string
Name of the network in the project with which the tenant project will be peered for executing pipelines. In the case of a shared VPC where the network resides in another host project, the network should be specified in the form projects/{host-project-id}/global/networks/{network}.
- IpAllocation string
The IP range in CIDR notation to use for the managed Data Fusion instance nodes. This range must not overlap with any other ranges used in the Data Fusion instance network.
- Network string
Name of the network in the project with which the tenant project will be peered for executing pipelines. In the case of a shared VPC where the network resides in another host project, the network should be specified in the form projects/{host-project-id}/global/networks/{network}.
- ipAllocation String
The IP range in CIDR notation to use for the managed Data Fusion instance nodes. This range must not overlap with any other ranges used in the Data Fusion instance network.
- network String
Name of the network in the project with which the tenant project will be peered for executing pipelines. In the case of a shared VPC where the network resides in another host project, the network should be specified in the form projects/{host-project-id}/global/networks/{network}.
- ipAllocation string
The IP range in CIDR notation to use for the managed Data Fusion instance nodes. This range must not overlap with any other ranges used in the Data Fusion instance network.
- network string
Name of the network in the project with which the tenant project will be peered for executing pipelines. In the case of a shared VPC where the network resides in another host project, the network should be specified in the form projects/{host-project-id}/global/networks/{network}.
- ip_allocation str
The IP range in CIDR notation to use for the managed Data Fusion instance nodes. This range must not overlap with any other ranges used in the Data Fusion instance network.
- network str
Name of the network in the project with which the tenant project will be peered for executing pipelines. In the case of a shared VPC where the network resides in another host project, the network should be specified in the form projects/{host-project-id}/global/networks/{network}.
- ipAllocation String
The IP range in CIDR notation to use for the managed Data Fusion instance nodes. This range must not overlap with any other ranges used in the Data Fusion instance network.
- network String
Name of the network in the project with which the tenant project will be peered for executing pipelines. In the case of a shared VPC where the network resides in another host project, the network should be specified in the form projects/{host-project-id}/global/networks/{network}.
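For a private instance peered with a Shared VPC host-project network, a hedged TypeScript sketch might pass the network in the fully qualified form described above together with a /22 range (the project ID, network name, and CIDR below are placeholders):
import * as gcp from "@pulumi/gcp";

// Hypothetical private instance using a Shared VPC network from a host project.
const privateInstance = new gcp.datafusion.Instance("privateInstance", {
    region: "us-central1",
    type: "BASIC",
    privateInstance: true,
    networkConfig: {
        network: "projects/my-host-project/global/networks/my-shared-vpc",
        ipAllocation: "10.89.48.0/22",
    },
});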
Import
Instance can be imported using any of these accepted formats:
$ pulumi import gcp:datafusion/instance:Instance default projects/{{project}}/locations/{{region}}/instances/{{name}}
$ pulumi import gcp:datafusion/instance:Instance default {{project}}/{{region}}/{{name}}
$ pulumi import gcp:datafusion/instance:Instance default {{region}}/{{name}}
$ pulumi import gcp:datafusion/instance:Instance default {{name}}
Package Details
- Repository
- Google Cloud (GCP) Classic pulumi/pulumi-gcp
- License
- Apache-2.0
- Notes
This Pulumi package is based on the google-beta Terraform Provider.