gcp.datastream.Stream
Explore with Pulumi AI
A resource representing streaming data from a source to a destination.
To get more information about Stream, see:
- API documentation (presumably the Datastream REST reference: https://cloud.google.com/datastream/docs/reference/rest/v1/projects.locations.streams — the original links were stripped during extraction; verify)
- How-to Guides: the official Google Cloud Datastream documentation (https://cloud.google.com/datastream/docs)
Example Usage
Datastream Stream Full
// Datastream Stream "full" example: stream a Cloud SQL MySQL database into a
// Cloud Storage bucket, with the stream encrypted by a customer-managed KMS key.
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
using Random = Pulumi.Random;

return await Deployment.RunAsync(() =>
{
    // Current project; its number is used below to build the Datastream
    // service-agent email: service-<number>@gcp-sa-datastream.iam.gserviceaccount.com.
    var project = Gcp.Organizations.GetProject.Invoke();

    // Source Cloud SQL MySQL instance. NOTE(review): the authorized networks
    // appear to be Datastream's public IPs for us-central1 — verify against the
    // currently published Datastream IP list.
    var instance = new Gcp.Sql.DatabaseInstance("instance", new()
    {
        DatabaseVersion = "MYSQL_8_0",
        Region = "us-central1",
        Settings = new Gcp.Sql.Inputs.DatabaseInstanceSettingsArgs
        {
            Tier = "db-f1-micro",
            BackupConfiguration = new Gcp.Sql.Inputs.DatabaseInstanceSettingsBackupConfigurationArgs
            {
                Enabled = true,
                // Binary logging is required for MySQL change data capture.
                BinaryLogEnabled = true,
            },
            IpConfiguration = new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationArgs
            {
                AuthorizedNetworks = new[]
                {
                    new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
                    {
                        Value = "34.71.242.81",
                    },
                    new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
                    {
                        Value = "34.72.28.29",
                    },
                    new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
                    {
                        Value = "34.67.6.157",
                    },
                    new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
                    {
                        Value = "34.67.234.134",
                    },
                    new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
                    {
                        Value = "34.72.239.218",
                    },
                },
            },
        },
        DeletionProtection = true,
    });

    var db = new Gcp.Sql.Database("db", new()
    {
        Instance = instance.Name,
    });

    // Random password for the replication user.
    var pwd = new Random.RandomPassword("pwd", new()
    {
        Length = 16,
        Special = false,
    });

    var user = new Gcp.Sql.User("user", new()
    {
        Instance = instance.Name,
        Host = "%",
        Password = pwd.Result,
    });

    // Connection profile telling Datastream how to reach the MySQL source.
    var sourceConnectionProfile = new Gcp.Datastream.ConnectionProfile("sourceConnectionProfile", new()
    {
        DisplayName = "Source connection profile",
        Location = "us-central1",
        ConnectionProfileId = "source-profile",
        MysqlProfile = new Gcp.Datastream.Inputs.ConnectionProfileMysqlProfileArgs
        {
            Hostname = instance.PublicIpAddress,
            Username = user.Name,
            Password = user.Password,
        },
    });

    // Destination bucket plus the IAM grants the Datastream service agent needs.
    var bucket = new Gcp.Storage.Bucket("bucket", new()
    {
        Location = "US",
        UniformBucketLevelAccess = true,
    });

    // FIX: use Output.Format so the Output<string> project number is resolved and
    // spliced into the member string. Plain string interpolation of an Output
    // would render the Output's type name rather than its value.
    var viewer = new Gcp.Storage.BucketIAMMember("viewer", new()
    {
        Bucket = bucket.Name,
        Role = "roles/storage.objectViewer",
        Member = Output.Format($"serviceAccount:service-{project.Apply(getProjectResult => getProjectResult.Number)}@gcp-sa-datastream.iam.gserviceaccount.com"),
    });

    var creator = new Gcp.Storage.BucketIAMMember("creator", new()
    {
        Bucket = bucket.Name,
        Role = "roles/storage.objectCreator",
        Member = Output.Format($"serviceAccount:service-{project.Apply(getProjectResult => getProjectResult.Number)}@gcp-sa-datastream.iam.gserviceaccount.com"),
    });

    var reader = new Gcp.Storage.BucketIAMMember("reader", new()
    {
        Bucket = bucket.Name,
        Role = "roles/storage.legacyBucketReader",
        Member = Output.Format($"serviceAccount:service-{project.Apply(getProjectResult => getProjectResult.Number)}@gcp-sa-datastream.iam.gserviceaccount.com"),
    });

    // Allow the Datastream service agent to encrypt/decrypt with the CMEK key.
    var keyUser = new Gcp.Kms.CryptoKeyIAMMember("keyUser", new()
    {
        CryptoKeyId = "kms-name",
        Role = "roles/cloudkms.cryptoKeyEncrypterDecrypter",
        Member = Output.Format($"serviceAccount:service-{project.Apply(getProjectResult => getProjectResult.Number)}@gcp-sa-datastream.iam.gserviceaccount.com"),
    });

    // Connection profile for the GCS destination.
    var destinationConnectionProfile = new Gcp.Datastream.ConnectionProfile("destinationConnectionProfile", new()
    {
        DisplayName = "Connection profile",
        Location = "us-central1",
        ConnectionProfileId = "destination-profile",
        GcsProfile = new Gcp.Datastream.Inputs.ConnectionProfileGcsProfileArgs
        {
            Bucket = bucket.Name,
            RootPath = "/path",
        },
    });

    // The stream itself: MySQL source -> GCS (gzipped JSON), created in the
    // NOT_STARTED state; backfill covers everything except one excluded column.
    var @default = new Gcp.Datastream.Stream("default", new()
    {
        StreamId = "my-stream",
        DesiredState = "NOT_STARTED",
        Location = "us-central1",
        DisplayName = "my stream",
        Labels =
        {
            { "key", "value" },
        },
        SourceConfig = new Gcp.Datastream.Inputs.StreamSourceConfigArgs
        {
            SourceConnectionProfile = sourceConnectionProfile.Id,
            MysqlSourceConfig = new Gcp.Datastream.Inputs.StreamSourceConfigMysqlSourceConfigArgs
            {
                IncludeObjects = new Gcp.Datastream.Inputs.StreamSourceConfigMysqlSourceConfigIncludeObjectsArgs
                {
                    MysqlDatabases = new[]
                    {
                        new Gcp.Datastream.Inputs.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseArgs
                        {
                            Database = "my-database",
                            MysqlTables = new[]
                            {
                                new Gcp.Datastream.Inputs.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableArgs
                                {
                                    Table = "includedTable",
                                    MysqlColumns = new[]
                                    {
                                        new Gcp.Datastream.Inputs.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableMysqlColumnArgs
                                        {
                                            Column = "includedColumn",
                                            DataType = "VARCHAR",
                                            Collation = "utf8mb4",
                                            PrimaryKey = false,
                                            Nullable = false,
                                            OrdinalPosition = 0,
                                        },
                                    },
                                },
                            },
                        },
                    },
                },
                ExcludeObjects = new Gcp.Datastream.Inputs.StreamSourceConfigMysqlSourceConfigExcludeObjectsArgs
                {
                    MysqlDatabases = new[]
                    {
                        new Gcp.Datastream.Inputs.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseArgs
                        {
                            Database = "my-database",
                            MysqlTables = new[]
                            {
                                new Gcp.Datastream.Inputs.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableArgs
                                {
                                    Table = "excludedTable",
                                    MysqlColumns = new[]
                                    {
                                        new Gcp.Datastream.Inputs.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableMysqlColumnArgs
                                        {
                                            Column = "excludedColumn",
                                            DataType = "VARCHAR",
                                            Collation = "utf8mb4",
                                            PrimaryKey = false,
                                            Nullable = false,
                                            OrdinalPosition = 0,
                                        },
                                    },
                                },
                            },
                        },
                    },
                },
                MaxConcurrentCdcTasks = 5,
            },
        },
        DestinationConfig = new Gcp.Datastream.Inputs.StreamDestinationConfigArgs
        {
            DestinationConnectionProfile = destinationConnectionProfile.Id,
            GcsDestinationConfig = new Gcp.Datastream.Inputs.StreamDestinationConfigGcsDestinationConfigArgs
            {
                Path = "mydata",
                FileRotationMb = 200,
                FileRotationInterval = "60s",
                JsonFileFormat = new Gcp.Datastream.Inputs.StreamDestinationConfigGcsDestinationConfigJsonFileFormatArgs
                {
                    SchemaFileFormat = "NO_SCHEMA_FILE",
                    Compression = "GZIP",
                },
            },
        },
        BackfillAll = new Gcp.Datastream.Inputs.StreamBackfillAllArgs
        {
            MysqlExcludedObjects = new Gcp.Datastream.Inputs.StreamBackfillAllMysqlExcludedObjectsArgs
            {
                MysqlDatabases = new[]
                {
                    new Gcp.Datastream.Inputs.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseArgs
                    {
                        Database = "my-database",
                        MysqlTables = new[]
                        {
                            new Gcp.Datastream.Inputs.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableArgs
                            {
                                Table = "excludedTable",
                                MysqlColumns = new[]
                                {
                                    new Gcp.Datastream.Inputs.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableMysqlColumnArgs
                                    {
                                        Column = "excludedColumn",
                                        DataType = "VARCHAR",
                                        Collation = "utf8mb4",
                                        PrimaryKey = false,
                                        Nullable = false,
                                        OrdinalPosition = 0,
                                    },
                                },
                            },
                        },
                    },
                },
            },
        },
        CustomerManagedEncryptionKey = "kms-name",
    }, new CustomResourceOptions
    {
        // The KMS grant must exist before the stream tries to use the key.
        DependsOn = new[]
        {
            keyUser,
        },
    });
});
// Datastream Stream "full" example: stream a Cloud SQL MySQL database into a
// Cloud Storage bucket, with the stream encrypted by a customer-managed KMS key.
package main
import (
"fmt"
"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/datastream"
"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/kms"
"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/organizations"
"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/sql"
"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/storage"
"github.com/pulumi/pulumi-random/sdk/v4/go/random"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
// Current project; project.Number feeds the Datastream service-agent email
// (service-<number>@gcp-sa-datastream.iam.gserviceaccount.com) built below.
project, err := organizations.LookupProject(ctx, nil, nil)
if err != nil {
return err
}
// Source Cloud SQL MySQL instance. NOTE(review): the authorized networks look
// like Datastream's public IPs for us-central1 — verify against the current list.
instance, err := sql.NewDatabaseInstance(ctx, "instance", &sql.DatabaseInstanceArgs{
DatabaseVersion: pulumi.String("MYSQL_8_0"),
Region: pulumi.String("us-central1"),
Settings: &sql.DatabaseInstanceSettingsArgs{
Tier: pulumi.String("db-f1-micro"),
BackupConfiguration: &sql.DatabaseInstanceSettingsBackupConfigurationArgs{
Enabled: pulumi.Bool(true),
// Binary logging is required for MySQL change data capture.
BinaryLogEnabled: pulumi.Bool(true),
},
IpConfiguration: &sql.DatabaseInstanceSettingsIpConfigurationArgs{
AuthorizedNetworks: sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArray{
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.71.242.81"),
},
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.72.28.29"),
},
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.67.6.157"),
},
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.67.234.134"),
},
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.72.239.218"),
},
},
},
},
DeletionProtection: pulumi.Bool(true),
})
if err != nil {
return err
}
_, err = sql.NewDatabase(ctx, "db", &sql.DatabaseArgs{
Instance: instance.Name,
})
if err != nil {
return err
}
// Random password for the replication user.
pwd, err := random.NewRandomPassword(ctx, "pwd", &random.RandomPasswordArgs{
Length: pulumi.Int(16),
Special: pulumi.Bool(false),
})
if err != nil {
return err
}
user, err := sql.NewUser(ctx, "user", &sql.UserArgs{
Instance: instance.Name,
Host: pulumi.String("%"),
Password: pwd.Result,
})
if err != nil {
return err
}
// Connection profile telling Datastream how to reach the MySQL source.
sourceConnectionProfile, err := datastream.NewConnectionProfile(ctx, "sourceConnectionProfile", &datastream.ConnectionProfileArgs{
DisplayName: pulumi.String("Source connection profile"),
Location: pulumi.String("us-central1"),
ConnectionProfileId: pulumi.String("source-profile"),
MysqlProfile: &datastream.ConnectionProfileMysqlProfileArgs{
Hostname: instance.PublicIpAddress,
Username: user.Name,
Password: user.Password,
},
})
if err != nil {
return err
}
// Destination bucket plus the IAM grants the Datastream service agent needs.
bucket, err := storage.NewBucket(ctx, "bucket", &storage.BucketArgs{
Location: pulumi.String("US"),
UniformBucketLevelAccess: pulumi.Bool(true),
})
if err != nil {
return err
}
_, err = storage.NewBucketIAMMember(ctx, "viewer", &storage.BucketIAMMemberArgs{
Bucket: bucket.Name,
Role: pulumi.String("roles/storage.objectViewer"),
Member: pulumi.String(fmt.Sprintf("serviceAccount:service-%v@gcp-sa-datastream.iam.gserviceaccount.com", project.Number)),
})
if err != nil {
return err
}
_, err = storage.NewBucketIAMMember(ctx, "creator", &storage.BucketIAMMemberArgs{
Bucket: bucket.Name,
Role: pulumi.String("roles/storage.objectCreator"),
Member: pulumi.String(fmt.Sprintf("serviceAccount:service-%v@gcp-sa-datastream.iam.gserviceaccount.com", project.Number)),
})
if err != nil {
return err
}
_, err = storage.NewBucketIAMMember(ctx, "reader", &storage.BucketIAMMemberArgs{
Bucket: bucket.Name,
Role: pulumi.String("roles/storage.legacyBucketReader"),
Member: pulumi.String(fmt.Sprintf("serviceAccount:service-%v@gcp-sa-datastream.iam.gserviceaccount.com", project.Number)),
})
if err != nil {
return err
}
// Allow the Datastream service agent to encrypt/decrypt with the CMEK key.
keyUser, err := kms.NewCryptoKeyIAMMember(ctx, "keyUser", &kms.CryptoKeyIAMMemberArgs{
CryptoKeyId: pulumi.String("kms-name"),
Role: pulumi.String("roles/cloudkms.cryptoKeyEncrypterDecrypter"),
Member: pulumi.String(fmt.Sprintf("serviceAccount:service-%v@gcp-sa-datastream.iam.gserviceaccount.com", project.Number)),
})
if err != nil {
return err
}
// Connection profile for the GCS destination.
destinationConnectionProfile, err := datastream.NewConnectionProfile(ctx, "destinationConnectionProfile", &datastream.ConnectionProfileArgs{
DisplayName: pulumi.String("Connection profile"),
Location: pulumi.String("us-central1"),
ConnectionProfileId: pulumi.String("destination-profile"),
GcsProfile: &datastream.ConnectionProfileGcsProfileArgs{
Bucket: bucket.Name,
RootPath: pulumi.String("/path"),
},
})
if err != nil {
return err
}
// The stream itself: MySQL source -> GCS (gzipped JSON), created NOT_STARTED;
// backfill covers everything except one excluded column. DependsOn ensures the
// KMS grant exists before the stream tries to use the key.
_, err = datastream.NewStream(ctx, "default", &datastream.StreamArgs{
StreamId: pulumi.String("my-stream"),
DesiredState: pulumi.String("NOT_STARTED"),
Location: pulumi.String("us-central1"),
DisplayName: pulumi.String("my stream"),
Labels: pulumi.StringMap{
"key": pulumi.String("value"),
},
SourceConfig: &datastream.StreamSourceConfigArgs{
SourceConnectionProfile: sourceConnectionProfile.ID(),
MysqlSourceConfig: &datastream.StreamSourceConfigMysqlSourceConfigArgs{
IncludeObjects: &datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsArgs{
MysqlDatabases: datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseArray{
&datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseArgs{
Database: pulumi.String("my-database"),
MysqlTables: datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableArray{
&datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableArgs{
Table: pulumi.String("includedTable"),
MysqlColumns: datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableMysqlColumnArray{
&datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableMysqlColumnArgs{
Column: pulumi.String("includedColumn"),
DataType: pulumi.String("VARCHAR"),
Collation: pulumi.String("utf8mb4"),
PrimaryKey: pulumi.Bool(false),
Nullable: pulumi.Bool(false),
OrdinalPosition: pulumi.Int(0),
},
},
},
},
},
},
},
ExcludeObjects: &datastream.StreamSourceConfigMysqlSourceConfigExcludeObjectsArgs{
MysqlDatabases: datastream.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseArray{
&datastream.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseArgs{
Database: pulumi.String("my-database"),
MysqlTables: datastream.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableArray{
&datastream.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableArgs{
Table: pulumi.String("excludedTable"),
MysqlColumns: datastream.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableMysqlColumnArray{
&datastream.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableMysqlColumnArgs{
Column: pulumi.String("excludedColumn"),
DataType: pulumi.String("VARCHAR"),
Collation: pulumi.String("utf8mb4"),
PrimaryKey: pulumi.Bool(false),
Nullable: pulumi.Bool(false),
OrdinalPosition: pulumi.Int(0),
},
},
},
},
},
},
},
MaxConcurrentCdcTasks: pulumi.Int(5),
},
},
DestinationConfig: &datastream.StreamDestinationConfigArgs{
DestinationConnectionProfile: destinationConnectionProfile.ID(),
GcsDestinationConfig: &datastream.StreamDestinationConfigGcsDestinationConfigArgs{
Path: pulumi.String("mydata"),
FileRotationMb: pulumi.Int(200),
FileRotationInterval: pulumi.String("60s"),
JsonFileFormat: &datastream.StreamDestinationConfigGcsDestinationConfigJsonFileFormatArgs{
SchemaFileFormat: pulumi.String("NO_SCHEMA_FILE"),
Compression: pulumi.String("GZIP"),
},
},
},
BackfillAll: &datastream.StreamBackfillAllArgs{
MysqlExcludedObjects: &datastream.StreamBackfillAllMysqlExcludedObjectsArgs{
MysqlDatabases: datastream.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseArray{
&datastream.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseArgs{
Database: pulumi.String("my-database"),
MysqlTables: datastream.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableArray{
&datastream.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableArgs{
Table: pulumi.String("excludedTable"),
MysqlColumns: datastream.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableMysqlColumnArray{
&datastream.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableMysqlColumnArgs{
Column: pulumi.String("excludedColumn"),
DataType: pulumi.String("VARCHAR"),
Collation: pulumi.String("utf8mb4"),
PrimaryKey: pulumi.Bool(false),
Nullable: pulumi.Bool(false),
OrdinalPosition: pulumi.Int(0),
},
},
},
},
},
},
},
},
CustomerManagedEncryptionKey: pulumi.String("kms-name"),
}, pulumi.DependsOn([]pulumi.Resource{
keyUser,
}))
if err != nil {
return err
}
return nil
})
}
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.organizations.OrganizationsFunctions;
import com.pulumi.gcp.organizations.inputs.GetProjectArgs;
import com.pulumi.gcp.sql.DatabaseInstance;
import com.pulumi.gcp.sql.DatabaseInstanceArgs;
import com.pulumi.gcp.sql.inputs.DatabaseInstanceSettingsArgs;
import com.pulumi.gcp.sql.inputs.DatabaseInstanceSettingsBackupConfigurationArgs;
import com.pulumi.gcp.sql.inputs.DatabaseInstanceSettingsIpConfigurationArgs;
import com.pulumi.gcp.sql.Database;
import com.pulumi.gcp.sql.DatabaseArgs;
import com.pulumi.random.RandomPassword;
import com.pulumi.random.RandomPasswordArgs;
import com.pulumi.gcp.sql.User;
import com.pulumi.gcp.sql.UserArgs;
import com.pulumi.gcp.datastream.ConnectionProfile;
import com.pulumi.gcp.datastream.ConnectionProfileArgs;
import com.pulumi.gcp.datastream.inputs.ConnectionProfileMysqlProfileArgs;
import com.pulumi.gcp.storage.Bucket;
import com.pulumi.gcp.storage.BucketArgs;
import com.pulumi.gcp.storage.BucketIAMMember;
import com.pulumi.gcp.storage.BucketIAMMemberArgs;
import com.pulumi.gcp.kms.CryptoKeyIAMMember;
import com.pulumi.gcp.kms.CryptoKeyIAMMemberArgs;
import com.pulumi.gcp.datastream.inputs.ConnectionProfileGcsProfileArgs;
import com.pulumi.gcp.datastream.Stream;
import com.pulumi.gcp.datastream.StreamArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigMysqlSourceConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigMysqlSourceConfigIncludeObjectsArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigMysqlSourceConfigExcludeObjectsArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigGcsDestinationConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigGcsDestinationConfigJsonFileFormatArgs;
import com.pulumi.gcp.datastream.inputs.StreamBackfillAllArgs;
import com.pulumi.gcp.datastream.inputs.StreamBackfillAllMysqlExcludedObjectsArgs;
import com.pulumi.resources.CustomResourceOptions;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var project = OrganizationsFunctions.getProject();
var instance = new DatabaseInstance("instance", DatabaseInstanceArgs.builder()
.databaseVersion("MYSQL_8_0")
.region("us-central1")
.settings(DatabaseInstanceSettingsArgs.builder()
.tier("db-f1-micro")
.backupConfiguration(DatabaseInstanceSettingsBackupConfigurationArgs.builder()
.enabled(true)
.binaryLogEnabled(true)
.build())
.ipConfiguration(DatabaseInstanceSettingsIpConfigurationArgs.builder()
.authorizedNetworks(
DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
.value("34.71.242.81")
.build(),
DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
.value("34.72.28.29")
.build(),
DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
.value("34.67.6.157")
.build(),
DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
.value("34.67.234.134")
.build(),
DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
.value("34.72.239.218")
.build())
.build())
.build())
.deletionProtection(true)
.build());
var db = new Database("db", DatabaseArgs.builder()
.instance(instance.name())
.build());
var pwd = new RandomPassword("pwd", RandomPasswordArgs.builder()
.length(16)
.special(false)
.build());
var user = new User("user", UserArgs.builder()
.instance(instance.name())
.host("%")
.password(pwd.result())
.build());
var sourceConnectionProfile = new ConnectionProfile("sourceConnectionProfile", ConnectionProfileArgs.builder()
.displayName("Source connection profile")
.location("us-central1")
.connectionProfileId("source-profile")
.mysqlProfile(ConnectionProfileMysqlProfileArgs.builder()
.hostname(instance.publicIpAddress())
.username(user.name())
.password(user.password())
.build())
.build());
var bucket = new Bucket("bucket", BucketArgs.builder()
.location("US")
.uniformBucketLevelAccess(true)
.build());
var viewer = new BucketIAMMember("viewer", BucketIAMMemberArgs.builder()
.bucket(bucket.name())
.role("roles/storage.objectViewer")
.member(String.format("serviceAccount:service-%s@gcp-sa-datastream.iam.gserviceaccount.com", project.applyValue(getProjectResult -> getProjectResult.number())))
.build());
var creator = new BucketIAMMember("creator", BucketIAMMemberArgs.builder()
.bucket(bucket.name())
.role("roles/storage.objectCreator")
.member(String.format("serviceAccount:service-%s@gcp-sa-datastream.iam.gserviceaccount.com", project.applyValue(getProjectResult -> getProjectResult.number())))
.build());
var reader = new BucketIAMMember("reader", BucketIAMMemberArgs.builder()
.bucket(bucket.name())
.role("roles/storage.legacyBucketReader")
.member(String.format("serviceAccount:service-%s@gcp-sa-datastream.iam.gserviceaccount.com", project.applyValue(getProjectResult -> getProjectResult.number())))
.build());
var keyUser = new CryptoKeyIAMMember("keyUser", CryptoKeyIAMMemberArgs.builder()
.cryptoKeyId("kms-name")
.role("roles/cloudkms.cryptoKeyEncrypterDecrypter")
.member(String.format("serviceAccount:service-%s@gcp-sa-datastream.iam.gserviceaccount.com", project.applyValue(getProjectResult -> getProjectResult.number())))
.build());
var destinationConnectionProfile = new ConnectionProfile("destinationConnectionProfile", ConnectionProfileArgs.builder()
.displayName("Connection profile")
.location("us-central1")
.connectionProfileId("destination-profile")
.gcsProfile(ConnectionProfileGcsProfileArgs.builder()
.bucket(bucket.name())
.rootPath("/path")
.build())
.build());
var default_ = new Stream("default", StreamArgs.builder()
.streamId("my-stream")
.desiredState("NOT_STARTED")
.location("us-central1")
.displayName("my stream")
.labels(Map.of("key", "value"))
.sourceConfig(StreamSourceConfigArgs.builder()
.sourceConnectionProfile(sourceConnectionProfile.id())
.mysqlSourceConfig(StreamSourceConfigMysqlSourceConfigArgs.builder()
.includeObjects(StreamSourceConfigMysqlSourceConfigIncludeObjectsArgs.builder()
.mysqlDatabases(StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseArgs.builder()
.database("my-database")
.mysqlTables(StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableArgs.builder()
.table("includedTable")
.mysqlColumns(StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableMysqlColumnArgs.builder()
.column("includedColumn")
.dataType("VARCHAR")
.collation("utf8mb4")
.primaryKey(false)
.nullable(false)
.ordinalPosition(0)
.build())
.build())
.build())
.build())
.excludeObjects(StreamSourceConfigMysqlSourceConfigExcludeObjectsArgs.builder()
.mysqlDatabases(StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseArgs.builder()
.database("my-database")
.mysqlTables(StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableArgs.builder()
.table("excludedTable")
.mysqlColumns(StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableMysqlColumnArgs.builder()
.column("excludedColumn")
.dataType("VARCHAR")
.collation("utf8mb4")
.primaryKey(false)
.nullable(false)
.ordinalPosition(0)
.build())
.build())
.build())
.build())
.maxConcurrentCdcTasks(5)
.build())
.build())
.destinationConfig(StreamDestinationConfigArgs.builder()
.destinationConnectionProfile(destinationConnectionProfile.id())
.gcsDestinationConfig(StreamDestinationConfigGcsDestinationConfigArgs.builder()
.path("mydata")
.fileRotationMb(200)
.fileRotationInterval("60s")
.jsonFileFormat(StreamDestinationConfigGcsDestinationConfigJsonFileFormatArgs.builder()
.schemaFileFormat("NO_SCHEMA_FILE")
.compression("GZIP")
.build())
.build())
.build())
.backfillAll(StreamBackfillAllArgs.builder()
.mysqlExcludedObjects(StreamBackfillAllMysqlExcludedObjectsArgs.builder()
.mysqlDatabases(StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseArgs.builder()
.database("my-database")
.mysqlTables(StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableArgs.builder()
.table("excludedTable")
.mysqlColumns(StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableMysqlColumnArgs.builder()
.column("excludedColumn")
.dataType("VARCHAR")
.collation("utf8mb4")
.primaryKey(false)
.nullable(false)
.ordinalPosition(0)
.build())
.build())
.build())
.build())
.build())
.customerManagedEncryptionKey("kms-name")
.build(), CustomResourceOptions.builder()
.dependsOn(keyUser)
.build());
}
}
# Datastream Stream "full" example: stream a Cloud SQL MySQL database into a
# Cloud Storage bucket, with the stream encrypted by a customer-managed KMS key.
import pulumi
import pulumi_gcp as gcp
import pulumi_random as random
# Current project; project.number feeds the Datastream service-agent email
# (service-<number>@gcp-sa-datastream.iam.gserviceaccount.com) below.
project = gcp.organizations.get_project()
# Source Cloud SQL MySQL instance. NOTE(review): the authorized networks look
# like Datastream's public IPs for us-central1 — verify against the current list.
instance = gcp.sql.DatabaseInstance("instance",
database_version="MYSQL_8_0",
region="us-central1",
settings=gcp.sql.DatabaseInstanceSettingsArgs(
tier="db-f1-micro",
backup_configuration=gcp.sql.DatabaseInstanceSettingsBackupConfigurationArgs(
enabled=True,
# Binary logging is required for MySQL change data capture.
binary_log_enabled=True,
),
ip_configuration=gcp.sql.DatabaseInstanceSettingsIpConfigurationArgs(
authorized_networks=[
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.71.242.81",
),
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.72.28.29",
),
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.67.6.157",
),
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.67.234.134",
),
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.72.239.218",
),
],
),
),
deletion_protection=True)
db = gcp.sql.Database("db", instance=instance.name)
# Random password for the replication user.
pwd = random.RandomPassword("pwd",
length=16,
special=False)
user = gcp.sql.User("user",
instance=instance.name,
host="%",
password=pwd.result)
# Connection profile telling Datastream how to reach the MySQL source.
source_connection_profile = gcp.datastream.ConnectionProfile("sourceConnectionProfile",
display_name="Source connection profile",
location="us-central1",
connection_profile_id="source-profile",
mysql_profile=gcp.datastream.ConnectionProfileMysqlProfileArgs(
hostname=instance.public_ip_address,
username=user.name,
password=user.password,
))
# Destination bucket plus the IAM grants the Datastream service agent needs.
bucket = gcp.storage.Bucket("bucket",
location="US",
uniform_bucket_level_access=True)
viewer = gcp.storage.BucketIAMMember("viewer",
bucket=bucket.name,
role="roles/storage.objectViewer",
member=f"serviceAccount:service-{project.number}@gcp-sa-datastream.iam.gserviceaccount.com")
creator = gcp.storage.BucketIAMMember("creator",
bucket=bucket.name,
role="roles/storage.objectCreator",
member=f"serviceAccount:service-{project.number}@gcp-sa-datastream.iam.gserviceaccount.com")
reader = gcp.storage.BucketIAMMember("reader",
bucket=bucket.name,
role="roles/storage.legacyBucketReader",
member=f"serviceAccount:service-{project.number}@gcp-sa-datastream.iam.gserviceaccount.com")
# Allow the Datastream service agent to encrypt/decrypt with the CMEK key.
key_user = gcp.kms.CryptoKeyIAMMember("keyUser",
crypto_key_id="kms-name",
role="roles/cloudkms.cryptoKeyEncrypterDecrypter",
member=f"serviceAccount:service-{project.number}@gcp-sa-datastream.iam.gserviceaccount.com")
# Connection profile for the GCS destination.
destination_connection_profile = gcp.datastream.ConnectionProfile("destinationConnectionProfile",
display_name="Connection profile",
location="us-central1",
connection_profile_id="destination-profile",
gcs_profile=gcp.datastream.ConnectionProfileGcsProfileArgs(
bucket=bucket.name,
root_path="/path",
))
# The stream itself: MySQL source -> GCS (gzipped JSON), created NOT_STARTED;
# backfill covers everything except one excluded column. depends_on ensures the
# KMS grant exists before the stream tries to use the key.
default = gcp.datastream.Stream("default",
stream_id="my-stream",
desired_state="NOT_STARTED",
location="us-central1",
display_name="my stream",
labels={
"key": "value",
},
source_config=gcp.datastream.StreamSourceConfigArgs(
source_connection_profile=source_connection_profile.id,
mysql_source_config=gcp.datastream.StreamSourceConfigMysqlSourceConfigArgs(
include_objects=gcp.datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsArgs(
mysql_databases=[gcp.datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseArgs(
database="my-database",
mysql_tables=[gcp.datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableArgs(
table="includedTable",
mysql_columns=[gcp.datastream.StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTableMysqlColumnArgs(
column="includedColumn",
data_type="VARCHAR",
collation="utf8mb4",
primary_key=False,
nullable=False,
ordinal_position=0,
)],
)],
)],
),
exclude_objects=gcp.datastream.StreamSourceConfigMysqlSourceConfigExcludeObjectsArgs(
mysql_databases=[gcp.datastream.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseArgs(
database="my-database",
mysql_tables=[gcp.datastream.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableArgs(
table="excludedTable",
mysql_columns=[gcp.datastream.StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableMysqlColumnArgs(
column="excludedColumn",
data_type="VARCHAR",
collation="utf8mb4",
primary_key=False,
nullable=False,
ordinal_position=0,
)],
)],
)],
),
max_concurrent_cdc_tasks=5,
),
),
destination_config=gcp.datastream.StreamDestinationConfigArgs(
destination_connection_profile=destination_connection_profile.id,
gcs_destination_config=gcp.datastream.StreamDestinationConfigGcsDestinationConfigArgs(
path="mydata",
file_rotation_mb=200,
file_rotation_interval="60s",
json_file_format=gcp.datastream.StreamDestinationConfigGcsDestinationConfigJsonFileFormatArgs(
schema_file_format="NO_SCHEMA_FILE",
compression="GZIP",
),
),
),
backfill_all=gcp.datastream.StreamBackfillAllArgs(
mysql_excluded_objects=gcp.datastream.StreamBackfillAllMysqlExcludedObjectsArgs(
mysql_databases=[gcp.datastream.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseArgs(
database="my-database",
mysql_tables=[gcp.datastream.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableArgs(
table="excludedTable",
mysql_columns=[gcp.datastream.StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableMysqlColumnArgs(
column="excludedColumn",
data_type="VARCHAR",
collation="utf8mb4",
primary_key=False,
nullable=False,
ordinal_position=0,
)],
)],
)],
),
),
customer_managed_encryption_key="kms-name",
opts=pulumi.ResourceOptions(depends_on=[key_user]))
// Datastream Stream "full" example: stream a Cloud SQL MySQL database into a
// Cloud Storage bucket, with the stream encrypted by a customer-managed KMS key.
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
import * as random from "@pulumi/random";
// Current project; its number feeds the Datastream service-agent email
// (service-<number>@gcp-sa-datastream.iam.gserviceaccount.com) below.
const project = gcp.organizations.getProject({});
// Source Cloud SQL MySQL instance. NOTE(review): the authorized networks look
// like Datastream's public IPs for us-central1 — verify against the current list.
const instance = new gcp.sql.DatabaseInstance("instance", {
databaseVersion: "MYSQL_8_0",
region: "us-central1",
settings: {
tier: "db-f1-micro",
backupConfiguration: {
enabled: true,
// Binary logging is required for MySQL change data capture.
binaryLogEnabled: true,
},
ipConfiguration: {
authorizedNetworks: [
{
value: "34.71.242.81",
},
{
value: "34.72.28.29",
},
{
value: "34.67.6.157",
},
{
value: "34.67.234.134",
},
{
value: "34.72.239.218",
},
],
},
},
deletionProtection: true,
});
const db = new gcp.sql.Database("db", {instance: instance.name});
// Random password for the replication user.
const pwd = new random.RandomPassword("pwd", {
length: 16,
special: false,
});
const user = new gcp.sql.User("user", {
instance: instance.name,
host: "%",
password: pwd.result,
});
// Connection profile telling Datastream how to reach the MySQL source.
const sourceConnectionProfile = new gcp.datastream.ConnectionProfile("sourceConnectionProfile", {
displayName: "Source connection profile",
location: "us-central1",
connectionProfileId: "source-profile",
mysqlProfile: {
hostname: instance.publicIpAddress,
username: user.name,
password: user.password,
},
});
// Destination bucket plus the IAM grants the Datastream service agent needs.
const bucket = new gcp.storage.Bucket("bucket", {
location: "US",
uniformBucketLevelAccess: true,
});
const viewer = new gcp.storage.BucketIAMMember("viewer", {
bucket: bucket.name,
role: "roles/storage.objectViewer",
member: project.then(project => `serviceAccount:service-${project.number}@gcp-sa-datastream.iam.gserviceaccount.com`),
});
const creator = new gcp.storage.BucketIAMMember("creator", {
bucket: bucket.name,
role: "roles/storage.objectCreator",
member: project.then(project => `serviceAccount:service-${project.number}@gcp-sa-datastream.iam.gserviceaccount.com`),
});
const reader = new gcp.storage.BucketIAMMember("reader", {
bucket: bucket.name,
role: "roles/storage.legacyBucketReader",
member: project.then(project => `serviceAccount:service-${project.number}@gcp-sa-datastream.iam.gserviceaccount.com`),
});
// Allow the Datastream service agent to encrypt/decrypt with the CMEK key.
const keyUser = new gcp.kms.CryptoKeyIAMMember("keyUser", {
cryptoKeyId: "kms-name",
role: "roles/cloudkms.cryptoKeyEncrypterDecrypter",
member: project.then(project => `serviceAccount:service-${project.number}@gcp-sa-datastream.iam.gserviceaccount.com`),
});
// Connection profile for the GCS destination.
const destinationConnectionProfile = new gcp.datastream.ConnectionProfile("destinationConnectionProfile", {
displayName: "Connection profile",
location: "us-central1",
connectionProfileId: "destination-profile",
gcsProfile: {
bucket: bucket.name,
rootPath: "/path",
},
});
// The stream itself: MySQL source -> GCS (gzipped JSON), created NOT_STARTED;
// backfill covers everything except one excluded column. dependsOn ensures the
// KMS grant exists before the stream tries to use the key.
const _default = new gcp.datastream.Stream("default", {
streamId: "my-stream",
desiredState: "NOT_STARTED",
location: "us-central1",
displayName: "my stream",
labels: {
key: "value",
},
sourceConfig: {
sourceConnectionProfile: sourceConnectionProfile.id,
mysqlSourceConfig: {
includeObjects: {
mysqlDatabases: [{
database: "my-database",
mysqlTables: [{
table: "includedTable",
mysqlColumns: [{
column: "includedColumn",
dataType: "VARCHAR",
collation: "utf8mb4",
primaryKey: false,
nullable: false,
ordinalPosition: 0,
}],
}],
}],
},
excludeObjects: {
mysqlDatabases: [{
database: "my-database",
mysqlTables: [{
table: "excludedTable",
mysqlColumns: [{
column: "excludedColumn",
dataType: "VARCHAR",
collation: "utf8mb4",
primaryKey: false,
nullable: false,
ordinalPosition: 0,
}],
}],
}],
},
maxConcurrentCdcTasks: 5,
},
},
destinationConfig: {
destinationConnectionProfile: destinationConnectionProfile.id,
gcsDestinationConfig: {
path: "mydata",
fileRotationMb: 200,
fileRotationInterval: "60s",
jsonFileFormat: {
schemaFileFormat: "NO_SCHEMA_FILE",
compression: "GZIP",
},
},
},
backfillAll: {
mysqlExcludedObjects: {
mysqlDatabases: [{
database: "my-database",
mysqlTables: [{
table: "excludedTable",
mysqlColumns: [{
column: "excludedColumn",
dataType: "VARCHAR",
collation: "utf8mb4",
primaryKey: false,
nullable: false,
ordinalPosition: 0,
}],
}],
}],
},
},
customerManagedEncryptionKey: "kms-name",
}, {
dependsOn: [keyUser],
});
resources:
instance:
type: gcp:sql:DatabaseInstance
properties:
databaseVersion: MYSQL_8_0
region: us-central1
settings:
tier: db-f1-micro
backupConfiguration:
enabled: true
binaryLogEnabled: true
ipConfiguration:
authorizedNetworks:
- value: 34.71.242.81
- value: 34.72.28.29
- value: 34.67.6.157
- value: 34.67.234.134
- value: 34.72.239.218
deletionProtection: true
db:
type: gcp:sql:Database
properties:
instance: ${instance.name}
pwd:
type: random:RandomPassword
properties:
length: 16
special: false
user:
type: gcp:sql:User
properties:
instance: ${instance.name}
host: '%'
password: ${pwd.result}
sourceConnectionProfile:
type: gcp:datastream:ConnectionProfile
properties:
displayName: Source connection profile
location: us-central1
connectionProfileId: source-profile
mysqlProfile:
hostname: ${instance.publicIpAddress}
username: ${user.name}
password: ${user.password}
bucket:
type: gcp:storage:Bucket
properties:
location: US
uniformBucketLevelAccess: true
viewer:
type: gcp:storage:BucketIAMMember
properties:
bucket: ${bucket.name}
role: roles/storage.objectViewer
member: serviceAccount:service-${project.number}@gcp-sa-datastream.iam.gserviceaccount.com
creator:
type: gcp:storage:BucketIAMMember
properties:
bucket: ${bucket.name}
role: roles/storage.objectCreator
member: serviceAccount:service-${project.number}@gcp-sa-datastream.iam.gserviceaccount.com
reader:
type: gcp:storage:BucketIAMMember
properties:
bucket: ${bucket.name}
role: roles/storage.legacyBucketReader
member: serviceAccount:service-${project.number}@gcp-sa-datastream.iam.gserviceaccount.com
keyUser:
type: gcp:kms:CryptoKeyIAMMember
properties:
cryptoKeyId: kms-name
role: roles/cloudkms.cryptoKeyEncrypterDecrypter
member: serviceAccount:service-${project.number}@gcp-sa-datastream.iam.gserviceaccount.com
destinationConnectionProfile:
type: gcp:datastream:ConnectionProfile
properties:
displayName: Connection profile
location: us-central1
connectionProfileId: destination-profile
gcsProfile:
bucket: ${bucket.name}
rootPath: /path
default:
type: gcp:datastream:Stream
properties:
streamId: my-stream
desiredState: NOT_STARTED
location: us-central1
displayName: my stream
labels:
key: value
sourceConfig:
sourceConnectionProfile: ${sourceConnectionProfile.id}
mysqlSourceConfig:
includeObjects:
mysqlDatabases:
- database: my-database
mysqlTables:
- table: includedTable
mysqlColumns:
- column: includedColumn
dataType: VARCHAR
collation: utf8mb4
primaryKey: false
nullable: false
ordinalPosition: 0
excludeObjects:
mysqlDatabases:
- database: my-database
mysqlTables:
- table: excludedTable
mysqlColumns:
- column: excludedColumn
dataType: VARCHAR
collation: utf8mb4
primaryKey: false
nullable: false
ordinalPosition: 0
maxConcurrentCdcTasks: 5
destinationConfig:
destinationConnectionProfile: ${destinationConnectionProfile.id}
gcsDestinationConfig:
path: mydata
fileRotationMb: 200
fileRotationInterval: 60s
jsonFileFormat:
schemaFileFormat: NO_SCHEMA_FILE
compression: GZIP
backfillAll:
mysqlExcludedObjects:
mysqlDatabases:
- database: my-database
mysqlTables:
- table: excludedTable
mysqlColumns:
- column: excludedColumn
dataType: VARCHAR
collation: utf8mb4
primaryKey: false
nullable: false
ordinalPosition: 0
customerManagedEncryptionKey: kms-name
options:
dependson:
- ${keyUser}
variables:
project:
fn::invoke:
Function: gcp:organizations:getProject
Arguments: {}
Datastream Stream Postgresql
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var source = new Gcp.Datastream.ConnectionProfile("source", new()
{
DisplayName = "Postgresql Source",
Location = "us-central1",
ConnectionProfileId = "source-profile",
PostgresqlProfile = new Gcp.Datastream.Inputs.ConnectionProfilePostgresqlProfileArgs
{
Hostname = "hostname",
Port = 3306,
Username = "user",
Password = "pass",
Database = "postgres",
},
});
var destination = new Gcp.Datastream.ConnectionProfile("destination", new()
{
DisplayName = "BigQuery Destination",
Location = "us-central1",
ConnectionProfileId = "destination-profile",
BigqueryProfile = null,
});
var @default = new Gcp.Datastream.Stream("default", new()
{
DisplayName = "Postgres to BigQuery",
Location = "us-central1",
StreamId = "my-stream",
DesiredState = "RUNNING",
SourceConfig = new Gcp.Datastream.Inputs.StreamSourceConfigArgs
{
SourceConnectionProfile = source.Id,
PostgresqlSourceConfig = new Gcp.Datastream.Inputs.StreamSourceConfigPostgresqlSourceConfigArgs
{
MaxConcurrentBackfillTasks = 12,
Publication = "publication",
ReplicationSlot = "replication_slot",
IncludeObjects = new Gcp.Datastream.Inputs.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsArgs
{
PostgresqlSchemas = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaArgs
{
Schema = "schema",
PostgresqlTables = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTableArgs
{
Table = "table",
PostgresqlColumns = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs
{
Column = "column",
},
},
},
},
},
},
},
ExcludeObjects = new Gcp.Datastream.Inputs.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsArgs
{
PostgresqlSchemas = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaArgs
{
Schema = "schema",
PostgresqlTables = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTableArgs
{
Table = "table",
PostgresqlColumns = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs
{
Column = "column",
},
},
},
},
},
},
},
},
},
DestinationConfig = new Gcp.Datastream.Inputs.StreamDestinationConfigArgs
{
DestinationConnectionProfile = destination.Id,
BigqueryDestinationConfig = new Gcp.Datastream.Inputs.StreamDestinationConfigBigqueryDestinationConfigArgs
{
DataFreshness = "900s",
SourceHierarchyDatasets = new Gcp.Datastream.Inputs.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs
{
DatasetTemplate = new Gcp.Datastream.Inputs.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs
{
Location = "us-central1",
},
},
},
},
BackfillAll = new Gcp.Datastream.Inputs.StreamBackfillAllArgs
{
PostgresqlExcludedObjects = new Gcp.Datastream.Inputs.StreamBackfillAllPostgresqlExcludedObjectsArgs
{
PostgresqlSchemas = new[]
{
new Gcp.Datastream.Inputs.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaArgs
{
Schema = "schema",
PostgresqlTables = new[]
{
new Gcp.Datastream.Inputs.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTableArgs
{
Table = "table",
PostgresqlColumns = new[]
{
new Gcp.Datastream.Inputs.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs
{
Column = "column",
},
},
},
},
},
},
},
},
});
});
package main
import (
"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/datastream"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
source, err := datastream.NewConnectionProfile(ctx, "source", &datastream.ConnectionProfileArgs{
DisplayName: pulumi.String("Postgresql Source"),
Location: pulumi.String("us-central1"),
ConnectionProfileId: pulumi.String("source-profile"),
PostgresqlProfile: &datastream.ConnectionProfilePostgresqlProfileArgs{
Hostname: pulumi.String("hostname"),
Port: pulumi.Int(3306),
Username: pulumi.String("user"),
Password: pulumi.String("pass"),
Database: pulumi.String("postgres"),
},
})
if err != nil {
return err
}
destination, err := datastream.NewConnectionProfile(ctx, "destination", &datastream.ConnectionProfileArgs{
DisplayName: pulumi.String("BigQuery Destination"),
Location: pulumi.String("us-central1"),
ConnectionProfileId: pulumi.String("destination-profile"),
BigqueryProfile: nil,
})
if err != nil {
return err
}
_, err = datastream.NewStream(ctx, "default", &datastream.StreamArgs{
DisplayName: pulumi.String("Postgres to BigQuery"),
Location: pulumi.String("us-central1"),
StreamId: pulumi.String("my-stream"),
DesiredState: pulumi.String("RUNNING"),
SourceConfig: &datastream.StreamSourceConfigArgs{
SourceConnectionProfile: source.ID(),
PostgresqlSourceConfig: &datastream.StreamSourceConfigPostgresqlSourceConfigArgs{
MaxConcurrentBackfillTasks: pulumi.Int(12),
Publication: pulumi.String("publication"),
ReplicationSlot: pulumi.String("replication_slot"),
IncludeObjects: &datastream.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsArgs{
PostgresqlSchemas: datastream.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaArray{
&datastream.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaArgs{
Schema: pulumi.String("schema"),
PostgresqlTables: datastream.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTableArray{
&datastream.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTableArgs{
Table: pulumi.String("table"),
PostgresqlColumns: datastream.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArray{
&datastream.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs{
Column: pulumi.String("column"),
},
},
},
},
},
},
},
ExcludeObjects: &datastream.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsArgs{
PostgresqlSchemas: datastream.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaArray{
&datastream.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaArgs{
Schema: pulumi.String("schema"),
PostgresqlTables: datastream.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTableArray{
&datastream.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTableArgs{
Table: pulumi.String("table"),
PostgresqlColumns: datastream.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArray{
&datastream.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs{
Column: pulumi.String("column"),
},
},
},
},
},
},
},
},
},
DestinationConfig: &datastream.StreamDestinationConfigArgs{
DestinationConnectionProfile: destination.ID(),
BigqueryDestinationConfig: &datastream.StreamDestinationConfigBigqueryDestinationConfigArgs{
DataFreshness: pulumi.String("900s"),
SourceHierarchyDatasets: &datastream.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs{
DatasetTemplate: &datastream.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs{
Location: pulumi.String("us-central1"),
},
},
},
},
BackfillAll: &datastream.StreamBackfillAllArgs{
PostgresqlExcludedObjects: &datastream.StreamBackfillAllPostgresqlExcludedObjectsArgs{
PostgresqlSchemas: datastream.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaArray{
&datastream.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaArgs{
Schema: pulumi.String("schema"),
PostgresqlTables: datastream.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTableArray{
&datastream.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTableArgs{
Table: pulumi.String("table"),
PostgresqlColumns: datastream.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArray{
&datastream.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs{
Column: pulumi.String("column"),
},
},
},
},
},
},
},
},
})
if err != nil {
return err
}
return nil
})
}
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.datastream.ConnectionProfile;
import com.pulumi.gcp.datastream.ConnectionProfileArgs;
import com.pulumi.gcp.datastream.inputs.ConnectionProfilePostgresqlProfileArgs;
import com.pulumi.gcp.datastream.inputs.ConnectionProfileBigqueryProfileArgs;
import com.pulumi.gcp.datastream.Stream;
import com.pulumi.gcp.datastream.StreamArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigPostgresqlSourceConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigBigqueryDestinationConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs;
import com.pulumi.gcp.datastream.inputs.StreamBackfillAllArgs;
import com.pulumi.gcp.datastream.inputs.StreamBackfillAllPostgresqlExcludedObjectsArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var source = new ConnectionProfile("source", ConnectionProfileArgs.builder()
.displayName("Postgresql Source")
.location("us-central1")
.connectionProfileId("source-profile")
.postgresqlProfile(ConnectionProfilePostgresqlProfileArgs.builder()
.hostname("hostname")
.port(3306)
.username("user")
.password("pass")
.database("postgres")
.build())
.build());
var destination = new ConnectionProfile("destination", ConnectionProfileArgs.builder()
.displayName("BigQuery Destination")
.location("us-central1")
.connectionProfileId("destination-profile")
.bigqueryProfile()
.build());
var default_ = new Stream("default", StreamArgs.builder()
.displayName("Postgres to BigQuery")
.location("us-central1")
.streamId("my-stream")
.desiredState("RUNNING")
.sourceConfig(StreamSourceConfigArgs.builder()
.sourceConnectionProfile(source.id())
.postgresqlSourceConfig(StreamSourceConfigPostgresqlSourceConfigArgs.builder()
.maxConcurrentBackfillTasks(12)
.publication("publication")
.replicationSlot("replication_slot")
.includeObjects(StreamSourceConfigPostgresqlSourceConfigIncludeObjectsArgs.builder()
.postgresqlSchemas(StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaArgs.builder()
.schema("schema")
.postgresqlTables(StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTableArgs.builder()
.table("table")
.postgresqlColumns(StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs.builder()
.column("column")
.build())
.build())
.build())
.build())
.excludeObjects(StreamSourceConfigPostgresqlSourceConfigExcludeObjectsArgs.builder()
.postgresqlSchemas(StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaArgs.builder()
.schema("schema")
.postgresqlTables(StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTableArgs.builder()
.table("table")
.postgresqlColumns(StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs.builder()
.column("column")
.build())
.build())
.build())
.build())
.build())
.build())
.destinationConfig(StreamDestinationConfigArgs.builder()
.destinationConnectionProfile(destination.id())
.bigqueryDestinationConfig(StreamDestinationConfigBigqueryDestinationConfigArgs.builder()
.dataFreshness("900s")
.sourceHierarchyDatasets(StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs.builder()
.datasetTemplate(StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs.builder()
.location("us-central1")
.build())
.build())
.build())
.build())
.backfillAll(StreamBackfillAllArgs.builder()
.postgresqlExcludedObjects(StreamBackfillAllPostgresqlExcludedObjectsArgs.builder()
.postgresqlSchemas(StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaArgs.builder()
.schema("schema")
.postgresqlTables(StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTableArgs.builder()
.table("table")
.postgresqlColumns(StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs.builder()
.column("column")
.build())
.build())
.build())
.build())
.build())
.build());
}
}
import pulumi
import pulumi_gcp as gcp
source = gcp.datastream.ConnectionProfile("source",
display_name="Postgresql Source",
location="us-central1",
connection_profile_id="source-profile",
postgresql_profile=gcp.datastream.ConnectionProfilePostgresqlProfileArgs(
hostname="hostname",
port=3306,
username="user",
password="pass",
database="postgres",
))
destination = gcp.datastream.ConnectionProfile("destination",
display_name="BigQuery Destination",
location="us-central1",
connection_profile_id="destination-profile",
bigquery_profile=gcp.datastream.ConnectionProfileBigqueryProfileArgs())
default = gcp.datastream.Stream("default",
display_name="Postgres to BigQuery",
location="us-central1",
stream_id="my-stream",
desired_state="RUNNING",
source_config=gcp.datastream.StreamSourceConfigArgs(
source_connection_profile=source.id,
postgresql_source_config=gcp.datastream.StreamSourceConfigPostgresqlSourceConfigArgs(
max_concurrent_backfill_tasks=12,
publication="publication",
replication_slot="replication_slot",
include_objects=gcp.datastream.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsArgs(
postgresql_schemas=[gcp.datastream.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaArgs(
schema="schema",
postgresql_tables=[gcp.datastream.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTableArgs(
table="table",
postgresql_columns=[gcp.datastream.StreamSourceConfigPostgresqlSourceConfigIncludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs(
column="column",
)],
)],
)],
),
exclude_objects=gcp.datastream.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsArgs(
postgresql_schemas=[gcp.datastream.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaArgs(
schema="schema",
postgresql_tables=[gcp.datastream.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTableArgs(
table="table",
postgresql_columns=[gcp.datastream.StreamSourceConfigPostgresqlSourceConfigExcludeObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs(
column="column",
)],
)],
)],
),
),
),
destination_config=gcp.datastream.StreamDestinationConfigArgs(
destination_connection_profile=destination.id,
bigquery_destination_config=gcp.datastream.StreamDestinationConfigBigqueryDestinationConfigArgs(
data_freshness="900s",
source_hierarchy_datasets=gcp.datastream.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs(
dataset_template=gcp.datastream.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs(
location="us-central1",
),
),
),
),
backfill_all=gcp.datastream.StreamBackfillAllArgs(
postgresql_excluded_objects=gcp.datastream.StreamBackfillAllPostgresqlExcludedObjectsArgs(
postgresql_schemas=[gcp.datastream.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaArgs(
schema="schema",
postgresql_tables=[gcp.datastream.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTableArgs(
table="table",
postgresql_columns=[gcp.datastream.StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs(
column="column",
)],
)],
)],
),
))
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
const source = new gcp.datastream.ConnectionProfile("source", {
displayName: "Postgresql Source",
location: "us-central1",
connectionProfileId: "source-profile",
postgresqlProfile: {
hostname: "hostname",
port: 3306,
username: "user",
password: "pass",
database: "postgres",
},
});
const destination = new gcp.datastream.ConnectionProfile("destination", {
displayName: "BigQuery Destination",
location: "us-central1",
connectionProfileId: "destination-profile",
bigqueryProfile: {},
});
const _default = new gcp.datastream.Stream("default", {
displayName: "Postgres to BigQuery",
location: "us-central1",
streamId: "my-stream",
desiredState: "RUNNING",
sourceConfig: {
sourceConnectionProfile: source.id,
postgresqlSourceConfig: {
maxConcurrentBackfillTasks: 12,
publication: "publication",
replicationSlot: "replication_slot",
includeObjects: {
postgresqlSchemas: [{
schema: "schema",
postgresqlTables: [{
table: "table",
postgresqlColumns: [{
column: "column",
}],
}],
}],
},
excludeObjects: {
postgresqlSchemas: [{
schema: "schema",
postgresqlTables: [{
table: "table",
postgresqlColumns: [{
column: "column",
}],
}],
}],
},
},
},
destinationConfig: {
destinationConnectionProfile: destination.id,
bigqueryDestinationConfig: {
dataFreshness: "900s",
sourceHierarchyDatasets: {
datasetTemplate: {
location: "us-central1",
},
},
},
},
backfillAll: {
postgresqlExcludedObjects: {
postgresqlSchemas: [{
schema: "schema",
postgresqlTables: [{
table: "table",
postgresqlColumns: [{
column: "column",
}],
}],
}],
},
},
});
resources:
source:
type: gcp:datastream:ConnectionProfile
properties:
displayName: Postgresql Source
location: us-central1
connectionProfileId: source-profile
postgresqlProfile:
hostname: hostname
port: 3306
username: user
password: pass
database: postgres
destination:
type: gcp:datastream:ConnectionProfile
properties:
displayName: BigQuery Destination
location: us-central1
connectionProfileId: destination-profile
bigqueryProfile: {}
default:
type: gcp:datastream:Stream
properties:
displayName: Postgres to BigQuery
location: us-central1
streamId: my-stream
desiredState: RUNNING
sourceConfig:
sourceConnectionProfile: ${source.id}
postgresqlSourceConfig:
maxConcurrentBackfillTasks: 12
publication: publication
replicationSlot: replication_slot
includeObjects:
postgresqlSchemas:
- schema: schema
postgresqlTables:
- table: table
postgresqlColumns:
- column: column
excludeObjects:
postgresqlSchemas:
- schema: schema
postgresqlTables:
- table: table
postgresqlColumns:
- column: column
destinationConfig:
destinationConnectionProfile: ${destination.id}
bigqueryDestinationConfig:
dataFreshness: 900s
sourceHierarchyDatasets:
datasetTemplate:
location: us-central1
backfillAll:
postgresqlExcludedObjects:
postgresqlSchemas:
- schema: schema
postgresqlTables:
- table: table
postgresqlColumns:
- column: column
Datastream Stream Oracle
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
return await Deployment.RunAsync(() =>
{
var source = new Gcp.Datastream.ConnectionProfile("source", new()
{
DisplayName = "Oracle Source",
Location = "us-central1",
ConnectionProfileId = "source-profile",
OracleProfile = new Gcp.Datastream.Inputs.ConnectionProfileOracleProfileArgs
{
Hostname = "hostname",
Port = 1521,
Username = "user",
Password = "pass",
DatabaseService = "ORCL",
},
});
var destination = new Gcp.Datastream.ConnectionProfile("destination", new()
{
DisplayName = "BigQuery Destination",
Location = "us-central1",
ConnectionProfileId = "destination-profile",
BigqueryProfile = null,
});
var stream5 = new Gcp.Datastream.Stream("stream5", new()
{
DisplayName = "Oracle to BigQuery",
Location = "us-central1",
StreamId = "my-stream",
DesiredState = "RUNNING",
SourceConfig = new Gcp.Datastream.Inputs.StreamSourceConfigArgs
{
SourceConnectionProfile = source.Id,
OracleSourceConfig = new Gcp.Datastream.Inputs.StreamSourceConfigOracleSourceConfigArgs
{
MaxConcurrentCdcTasks = 8,
MaxConcurrentBackfillTasks = 12,
IncludeObjects = new Gcp.Datastream.Inputs.StreamSourceConfigOracleSourceConfigIncludeObjectsArgs
{
OracleSchemas = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaArgs
{
Schema = "schema",
OracleTables = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableArgs
{
Table = "table",
OracleColumns = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableOracleColumnArgs
{
Column = "column",
},
},
},
},
},
},
},
ExcludeObjects = new Gcp.Datastream.Inputs.StreamSourceConfigOracleSourceConfigExcludeObjectsArgs
{
OracleSchemas = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaArgs
{
Schema = "schema",
OracleTables = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableArgs
{
Table = "table",
OracleColumns = new[]
{
new Gcp.Datastream.Inputs.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableOracleColumnArgs
{
Column = "column",
},
},
},
},
},
},
},
DropLargeObjects = null,
},
},
DestinationConfig = new Gcp.Datastream.Inputs.StreamDestinationConfigArgs
{
DestinationConnectionProfile = destination.Id,
BigqueryDestinationConfig = new Gcp.Datastream.Inputs.StreamDestinationConfigBigqueryDestinationConfigArgs
{
DataFreshness = "900s",
SourceHierarchyDatasets = new Gcp.Datastream.Inputs.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs
{
DatasetTemplate = new Gcp.Datastream.Inputs.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs
{
Location = "us-central1",
},
},
},
},
BackfillAll = new Gcp.Datastream.Inputs.StreamBackfillAllArgs
{
OracleExcludedObjects = new Gcp.Datastream.Inputs.StreamBackfillAllOracleExcludedObjectsArgs
{
OracleSchemas = new[]
{
new Gcp.Datastream.Inputs.StreamBackfillAllOracleExcludedObjectsOracleSchemaArgs
{
Schema = "schema",
OracleTables = new[]
{
new Gcp.Datastream.Inputs.StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableArgs
{
Table = "table",
OracleColumns = new[]
{
new Gcp.Datastream.Inputs.StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableOracleColumnArgs
{
Column = "column",
},
},
},
},
},
},
},
},
});
});
package main
import (
"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/datastream"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
source, err := datastream.NewConnectionProfile(ctx, "source", &datastream.ConnectionProfileArgs{
DisplayName: pulumi.String("Oracle Source"),
Location: pulumi.String("us-central1"),
ConnectionProfileId: pulumi.String("source-profile"),
OracleProfile: &datastream.ConnectionProfileOracleProfileArgs{
Hostname: pulumi.String("hostname"),
Port: pulumi.Int(1521),
Username: pulumi.String("user"),
Password: pulumi.String("pass"),
DatabaseService: pulumi.String("ORCL"),
},
})
if err != nil {
return err
}
destination, err := datastream.NewConnectionProfile(ctx, "destination", &datastream.ConnectionProfileArgs{
DisplayName: pulumi.String("BigQuery Destination"),
Location: pulumi.String("us-central1"),
ConnectionProfileId: pulumi.String("destination-profile"),
BigqueryProfile: nil,
})
if err != nil {
return err
}
_, err = datastream.NewStream(ctx, "stream5", &datastream.StreamArgs{
DisplayName: pulumi.String("Oracle to BigQuery"),
Location: pulumi.String("us-central1"),
StreamId: pulumi.String("my-stream"),
DesiredState: pulumi.String("RUNNING"),
SourceConfig: &datastream.StreamSourceConfigArgs{
SourceConnectionProfile: source.ID(),
OracleSourceConfig: &datastream.StreamSourceConfigOracleSourceConfigArgs{
MaxConcurrentCdcTasks: pulumi.Int(8),
MaxConcurrentBackfillTasks: pulumi.Int(12),
IncludeObjects: &datastream.StreamSourceConfigOracleSourceConfigIncludeObjectsArgs{
OracleSchemas: datastream.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaArray{
&datastream.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaArgs{
Schema: pulumi.String("schema"),
OracleTables: datastream.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableArray{
&datastream.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableArgs{
Table: pulumi.String("table"),
OracleColumns: datastream.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableOracleColumnArray{
&datastream.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableOracleColumnArgs{
Column: pulumi.String("column"),
},
},
},
},
},
},
},
ExcludeObjects: &datastream.StreamSourceConfigOracleSourceConfigExcludeObjectsArgs{
OracleSchemas: datastream.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaArray{
&datastream.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaArgs{
Schema: pulumi.String("schema"),
OracleTables: datastream.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableArray{
&datastream.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableArgs{
Table: pulumi.String("table"),
OracleColumns: datastream.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableOracleColumnArray{
&datastream.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableOracleColumnArgs{
Column: pulumi.String("column"),
},
},
},
},
},
},
},
DropLargeObjects: nil,
},
},
DestinationConfig: &datastream.StreamDestinationConfigArgs{
DestinationConnectionProfile: destination.ID(),
BigqueryDestinationConfig: &datastream.StreamDestinationConfigBigqueryDestinationConfigArgs{
DataFreshness: pulumi.String("900s"),
SourceHierarchyDatasets: &datastream.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs{
DatasetTemplate: &datastream.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs{
Location: pulumi.String("us-central1"),
},
},
},
},
BackfillAll: &datastream.StreamBackfillAllArgs{
OracleExcludedObjects: &datastream.StreamBackfillAllOracleExcludedObjectsArgs{
OracleSchemas: datastream.StreamBackfillAllOracleExcludedObjectsOracleSchemaArray{
&datastream.StreamBackfillAllOracleExcludedObjectsOracleSchemaArgs{
Schema: pulumi.String("schema"),
OracleTables: datastream.StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableArray{
&datastream.StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableArgs{
Table: pulumi.String("table"),
OracleColumns: datastream.StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableOracleColumnArray{
&datastream.StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableOracleColumnArgs{
Column: pulumi.String("column"),
},
},
},
},
},
},
},
},
})
if err != nil {
return err
}
return nil
})
}
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.datastream.ConnectionProfile;
import com.pulumi.gcp.datastream.ConnectionProfileArgs;
import com.pulumi.gcp.datastream.inputs.ConnectionProfileOracleProfileArgs;
import com.pulumi.gcp.datastream.inputs.ConnectionProfileBigqueryProfileArgs;
import com.pulumi.gcp.datastream.Stream;
import com.pulumi.gcp.datastream.StreamArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigOracleSourceConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigOracleSourceConfigIncludeObjectsArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigOracleSourceConfigExcludeObjectsArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigOracleSourceConfigDropLargeObjectsArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigBigqueryDestinationConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs;
import com.pulumi.gcp.datastream.inputs.StreamBackfillAllArgs;
import com.pulumi.gcp.datastream.inputs.StreamBackfillAllOracleExcludedObjectsArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var source = new ConnectionProfile("source", ConnectionProfileArgs.builder()
.displayName("Oracle Source")
.location("us-central1")
.connectionProfileId("source-profile")
.oracleProfile(ConnectionProfileOracleProfileArgs.builder()
.hostname("hostname")
.port(1521)
.username("user")
.password("pass")
.databaseService("ORCL")
.build())
.build());
var destination = new ConnectionProfile("destination", ConnectionProfileArgs.builder()
.displayName("BigQuery Destination")
.location("us-central1")
.connectionProfileId("destination-profile")
.bigqueryProfile()
.build());
var stream5 = new Stream("stream5", StreamArgs.builder()
.displayName("Oracle to BigQuery")
.location("us-central1")
.streamId("my-stream")
.desiredState("RUNNING")
.sourceConfig(StreamSourceConfigArgs.builder()
.sourceConnectionProfile(source.id())
.oracleSourceConfig(StreamSourceConfigOracleSourceConfigArgs.builder()
.maxConcurrentCdcTasks(8)
.maxConcurrentBackfillTasks(12)
.includeObjects(StreamSourceConfigOracleSourceConfigIncludeObjectsArgs.builder()
.oracleSchemas(StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaArgs.builder()
.schema("schema")
.oracleTables(StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableArgs.builder()
.table("table")
.oracleColumns(StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableOracleColumnArgs.builder()
.column("column")
.build())
.build())
.build())
.build())
.excludeObjects(StreamSourceConfigOracleSourceConfigExcludeObjectsArgs.builder()
.oracleSchemas(StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaArgs.builder()
.schema("schema")
.oracleTables(StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableArgs.builder()
.table("table")
.oracleColumns(StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableOracleColumnArgs.builder()
.column("column")
.build())
.build())
.build())
.build())
.dropLargeObjects()
.build())
.build())
.destinationConfig(StreamDestinationConfigArgs.builder()
.destinationConnectionProfile(destination.id())
.bigqueryDestinationConfig(StreamDestinationConfigBigqueryDestinationConfigArgs.builder()
.dataFreshness("900s")
.sourceHierarchyDatasets(StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs.builder()
.datasetTemplate(StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs.builder()
.location("us-central1")
.build())
.build())
.build())
.build())
.backfillAll(StreamBackfillAllArgs.builder()
.oracleExcludedObjects(StreamBackfillAllOracleExcludedObjectsArgs.builder()
.oracleSchemas(StreamBackfillAllOracleExcludedObjectsOracleSchemaArgs.builder()
.schema("schema")
.oracleTables(StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableArgs.builder()
.table("table")
.oracleColumns(StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableOracleColumnArgs.builder()
.column("column")
.build())
.build())
.build())
.build())
.build())
.build());
}
}
# Pulumi example program: an Oracle source connection profile, an (empty)
# BigQuery destination profile, and a Datastream stream wiring them together
# with include/exclude object lists and a backfill-all exclusion list.
import pulumi
import pulumi_gcp as gcp
# Connection profile describing the Oracle source database.
source = gcp.datastream.ConnectionProfile("source",
display_name="Oracle Source",
location="us-central1",
connection_profile_id="source-profile",
oracle_profile=gcp.datastream.ConnectionProfileOracleProfileArgs(
hostname="hostname",
port=1521,
username="user",
password="pass",
database_service="ORCL",
))
# BigQuery destination profile; the profile itself needs no settings.
destination = gcp.datastream.ConnectionProfile("destination",
display_name="BigQuery Destination",
location="us-central1",
connection_profile_id="destination-profile",
bigquery_profile=gcp.datastream.ConnectionProfileBigqueryProfileArgs())
# The stream: CDC object selection on the source side, dataset-per-schema
# layout on the BigQuery side, and a backfill-all policy with exclusions.
stream5 = gcp.datastream.Stream("stream5",
display_name="Oracle to BigQuery",
location="us-central1",
stream_id="my-stream",
desired_state="RUNNING",
source_config=gcp.datastream.StreamSourceConfigArgs(
source_connection_profile=source.id,
oracle_source_config=gcp.datastream.StreamSourceConfigOracleSourceConfigArgs(
max_concurrent_cdc_tasks=8,
max_concurrent_backfill_tasks=12,
include_objects=gcp.datastream.StreamSourceConfigOracleSourceConfigIncludeObjectsArgs(
oracle_schemas=[gcp.datastream.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaArgs(
schema="schema",
oracle_tables=[gcp.datastream.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableArgs(
table="table",
oracle_columns=[gcp.datastream.StreamSourceConfigOracleSourceConfigIncludeObjectsOracleSchemaOracleTableOracleColumnArgs(
column="column",
)],
)],
)],
),
exclude_objects=gcp.datastream.StreamSourceConfigOracleSourceConfigExcludeObjectsArgs(
oracle_schemas=[gcp.datastream.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaArgs(
schema="schema",
oracle_tables=[gcp.datastream.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableArgs(
table="table",
oracle_columns=[gcp.datastream.StreamSourceConfigOracleSourceConfigExcludeObjectsOracleSchemaOracleTableOracleColumnArgs(
column="column",
)],
)],
)],
),
# Empty args object enables dropping of large objects.
drop_large_objects=gcp.datastream.StreamSourceConfigOracleSourceConfigDropLargeObjectsArgs(),
),
),
destination_config=gcp.datastream.StreamDestinationConfigArgs(
destination_connection_profile=destination.id,
bigquery_destination_config=gcp.datastream.StreamDestinationConfigBigqueryDestinationConfigArgs(
data_freshness="900s",
source_hierarchy_datasets=gcp.datastream.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs(
dataset_template=gcp.datastream.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs(
location="us-central1",
),
),
),
),
# Backfill everything except the listed schema/table/column.
backfill_all=gcp.datastream.StreamBackfillAllArgs(
oracle_excluded_objects=gcp.datastream.StreamBackfillAllOracleExcludedObjectsArgs(
oracle_schemas=[gcp.datastream.StreamBackfillAllOracleExcludedObjectsOracleSchemaArgs(
schema="schema",
oracle_tables=[gcp.datastream.StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableArgs(
table="table",
oracle_columns=[gcp.datastream.StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableOracleColumnArgs(
column="column",
)],
)],
)],
),
))
// Pulumi example program: Oracle source profile, empty BigQuery destination
// profile, and a Datastream stream with include/exclude object lists and a
// backfill-all exclusion list.
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
// Connection profile describing the Oracle source database.
const source = new gcp.datastream.ConnectionProfile("source", {
displayName: "Oracle Source",
location: "us-central1",
connectionProfileId: "source-profile",
oracleProfile: {
hostname: "hostname",
port: 1521,
username: "user",
password: "pass",
databaseService: "ORCL",
},
});
// BigQuery destination profile; the profile itself needs no settings.
const destination = new gcp.datastream.ConnectionProfile("destination", {
displayName: "BigQuery Destination",
location: "us-central1",
connectionProfileId: "destination-profile",
bigqueryProfile: {},
});
// The stream: CDC object selection, dataset-per-schema BigQuery layout,
// and backfill-all with exclusions.
const stream5 = new gcp.datastream.Stream("stream5", {
displayName: "Oracle to BigQuery",
location: "us-central1",
streamId: "my-stream",
desiredState: "RUNNING",
sourceConfig: {
sourceConnectionProfile: source.id,
oracleSourceConfig: {
maxConcurrentCdcTasks: 8,
maxConcurrentBackfillTasks: 12,
includeObjects: {
oracleSchemas: [{
schema: "schema",
oracleTables: [{
table: "table",
oracleColumns: [{
column: "column",
}],
}],
}],
},
excludeObjects: {
oracleSchemas: [{
schema: "schema",
oracleTables: [{
table: "table",
oracleColumns: [{
column: "column",
}],
}],
}],
},
// Empty object enables dropping of large objects.
dropLargeObjects: {},
},
},
destinationConfig: {
destinationConnectionProfile: destination.id,
bigqueryDestinationConfig: {
dataFreshness: "900s",
sourceHierarchyDatasets: {
datasetTemplate: {
location: "us-central1",
},
},
},
},
// Backfill everything except the listed schema/table/column.
backfillAll: {
oracleExcludedObjects: {
oracleSchemas: [{
schema: "schema",
oracleTables: [{
table: "table",
oracleColumns: [{
column: "column",
}],
}],
}],
},
},
});
# Pulumi YAML example: Oracle source profile, empty BigQuery destination
# profile, and a Datastream stream with include/exclude lists and a
# backfill-all exclusion list.
# NOTE(review): indentation was flattened by the documentation extraction;
# the key nesting below reflects the intended structure.
resources:
# Connection profile describing the Oracle source database.
source:
type: gcp:datastream:ConnectionProfile
properties:
displayName: Oracle Source
location: us-central1
connectionProfileId: source-profile
oracleProfile:
hostname: hostname
port: 1521
username: user
password: pass
databaseService: ORCL
# BigQuery destination profile; the profile itself needs no settings.
destination:
type: gcp:datastream:ConnectionProfile
properties:
displayName: BigQuery Destination
location: us-central1
connectionProfileId: destination-profile
bigqueryProfile: {}
# The stream wiring source to destination.
stream5:
type: gcp:datastream:Stream
properties:
displayName: Oracle to BigQuery
location: us-central1
streamId: my-stream
desiredState: RUNNING
sourceConfig:
sourceConnectionProfile: ${source.id}
oracleSourceConfig:
maxConcurrentCdcTasks: 8
maxConcurrentBackfillTasks: 12
includeObjects:
oracleSchemas:
- schema: schema
oracleTables:
- table: table
oracleColumns:
- column: column
excludeObjects:
oracleSchemas:
- schema: schema
oracleTables:
- table: table
oracleColumns:
- column: column
# Empty mapping enables dropping of large objects.
dropLargeObjects: {}
destinationConfig:
destinationConnectionProfile: ${destination.id}
bigqueryDestinationConfig:
dataFreshness: 900s
sourceHierarchyDatasets:
datasetTemplate:
location: us-central1
# Backfill everything except the listed schema/table/column.
backfillAll:
oracleExcludedObjects:
oracleSchemas:
- schema: schema
oracleTables:
- table: table
oracleColumns:
- column: column
Datastream Stream Postgresql Bigquery Dataset Id
// Pulumi example program (top-level statements): streams a Cloud SQL MySQL
// instance into a single target BigQuery dataset via Datastream.
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
using Random = Pulumi.Random;

return await Deployment.RunAsync(() =>
{
// BigQuery dataset used as the single target dataset for the stream.
var postgres = new Gcp.BigQuery.Dataset("postgres", new()
{
DatasetId = "postgres",
FriendlyName = "postgres",
Description = "Database of postgres",
Location = "us-central1",
});
// BigQuery destination profile; null profile body means no settings needed.
var destinationConnectionProfile2 = new Gcp.Datastream.ConnectionProfile("destinationConnectionProfile2", new()
{
DisplayName = "Connection profile",
Location = "us-central1",
ConnectionProfileId = "dest-profile",
BigqueryProfile = null,
});
// Cloud SQL MySQL instance acting as the source database. The authorized
// networks below are the example Datastream public IPs.
var instance = new Gcp.Sql.DatabaseInstance("instance", new()
{
DatabaseVersion = "MYSQL_8_0",
Region = "us-central1",
Settings = new Gcp.Sql.Inputs.DatabaseInstanceSettingsArgs
{
Tier = "db-f1-micro",
BackupConfiguration = new Gcp.Sql.Inputs.DatabaseInstanceSettingsBackupConfigurationArgs
{
Enabled = true,
// Binary logging is required for MySQL CDC.
BinaryLogEnabled = true,
},
IpConfiguration = new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationArgs
{
AuthorizedNetworks = new[]
{
new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
{
Value = "34.71.242.81",
},
new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
{
Value = "34.72.28.29",
},
new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
{
Value = "34.67.6.157",
},
new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
{
Value = "34.67.234.134",
},
new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
{
Value = "34.72.239.218",
},
},
},
},
DeletionProtection = false,
});
// Random password for the database user below.
var pwd = new Random.RandomPassword("pwd", new()
{
Length = 16,
Special = false,
});
// Database user Datastream connects as.
var user = new Gcp.Sql.User("user", new()
{
Instance = instance.Name,
Host = "%",
Password = pwd.Result,
});
// MySQL source connection profile pointing at the Cloud SQL instance.
var sourceConnectionProfile = new Gcp.Datastream.ConnectionProfile("sourceConnectionProfile", new()
{
DisplayName = "Source connection profile",
Location = "us-central1",
ConnectionProfileId = "source-profile",
MysqlProfile = new Gcp.Datastream.Inputs.ConnectionProfileMysqlProfileArgs
{
Hostname = instance.PublicIpAddress,
Username = user.Name,
Password = user.Password,
},
});
// The stream itself, writing into the single target dataset above.
var @default = new Gcp.Datastream.Stream("default", new()
{
DisplayName = "postgres to bigQuery",
Location = "us-central1",
StreamId = "postgres-bigquery",
SourceConfig = new Gcp.Datastream.Inputs.StreamSourceConfigArgs
{
SourceConnectionProfile = sourceConnectionProfile.Id,
MysqlSourceConfig = null,
},
DestinationConfig = new Gcp.Datastream.Inputs.StreamDestinationConfigArgs
{
DestinationConnectionProfile = destinationConnectionProfile2.Id,
BigqueryDestinationConfig = new Gcp.Datastream.Inputs.StreamDestinationConfigBigqueryDestinationConfigArgs
{
DataFreshness = "900s",
SingleTargetDataset = new Gcp.Datastream.Inputs.StreamDestinationConfigBigqueryDestinationConfigSingleTargetDatasetArgs
{
DatasetId = postgres.Id,
},
},
},
BackfillAll = null,
});
// Database inside the source instance.
var db = new Gcp.Sql.Database("db", new()
{
Instance = instance.Name,
});
});
// Pulumi example program: streams a Cloud SQL MySQL instance into a single
// target BigQuery dataset via Datastream.
package main

import (
"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/bigquery"
"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/datastream"
"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/sql"
"github.com/pulumi/pulumi-random/sdk/v4/go/random"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
// BigQuery dataset used as the single target dataset for the stream.
postgres, err := bigquery.NewDataset(ctx, "postgres", &bigquery.DatasetArgs{
DatasetId: pulumi.String("postgres"),
FriendlyName: pulumi.String("postgres"),
Description: pulumi.String("Database of postgres"),
Location: pulumi.String("us-central1"),
})
if err != nil {
return err
}
// BigQuery destination profile; nil profile body means no settings needed.
destinationConnectionProfile2, err := datastream.NewConnectionProfile(ctx, "destinationConnectionProfile2", &datastream.ConnectionProfileArgs{
DisplayName: pulumi.String("Connection profile"),
Location: pulumi.String("us-central1"),
ConnectionProfileId: pulumi.String("dest-profile"),
BigqueryProfile: nil,
})
if err != nil {
return err
}
// Cloud SQL MySQL source instance; the authorized networks are the
// example Datastream public IPs.
instance, err := sql.NewDatabaseInstance(ctx, "instance", &sql.DatabaseInstanceArgs{
DatabaseVersion: pulumi.String("MYSQL_8_0"),
Region: pulumi.String("us-central1"),
Settings: &sql.DatabaseInstanceSettingsArgs{
Tier: pulumi.String("db-f1-micro"),
BackupConfiguration: &sql.DatabaseInstanceSettingsBackupConfigurationArgs{
Enabled: pulumi.Bool(true),
// Binary logging is required for MySQL CDC.
BinaryLogEnabled: pulumi.Bool(true),
},
IpConfiguration: &sql.DatabaseInstanceSettingsIpConfigurationArgs{
AuthorizedNetworks: sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArray{
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.71.242.81"),
},
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.72.28.29"),
},
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.67.6.157"),
},
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.67.234.134"),
},
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.72.239.218"),
},
},
},
},
DeletionProtection: pulumi.Bool(false),
})
if err != nil {
return err
}
// Random password for the database user below.
pwd, err := random.NewRandomPassword(ctx, "pwd", &random.RandomPasswordArgs{
Length: pulumi.Int(16),
Special: pulumi.Bool(false),
})
if err != nil {
return err
}
// Database user Datastream connects as.
user, err := sql.NewUser(ctx, "user", &sql.UserArgs{
Instance: instance.Name,
Host: pulumi.String("%"),
Password: pwd.Result,
})
if err != nil {
return err
}
// MySQL source connection profile pointing at the Cloud SQL instance.
sourceConnectionProfile, err := datastream.NewConnectionProfile(ctx, "sourceConnectionProfile", &datastream.ConnectionProfileArgs{
DisplayName: pulumi.String("Source connection profile"),
Location: pulumi.String("us-central1"),
ConnectionProfileId: pulumi.String("source-profile"),
MysqlProfile: &datastream.ConnectionProfileMysqlProfileArgs{
Hostname: instance.PublicIpAddress,
Username: user.Name,
Password: user.Password,
},
})
if err != nil {
return err
}
// The stream itself, writing into the single target dataset above.
_, err = datastream.NewStream(ctx, "default", &datastream.StreamArgs{
DisplayName: pulumi.String("postgres to bigQuery"),
Location: pulumi.String("us-central1"),
StreamId: pulumi.String("postgres-bigquery"),
SourceConfig: &datastream.StreamSourceConfigArgs{
SourceConnectionProfile: sourceConnectionProfile.ID(),
MysqlSourceConfig: nil,
},
DestinationConfig: &datastream.StreamDestinationConfigArgs{
DestinationConnectionProfile: destinationConnectionProfile2.ID(),
BigqueryDestinationConfig: &datastream.StreamDestinationConfigBigqueryDestinationConfigArgs{
DataFreshness: pulumi.String("900s"),
SingleTargetDataset: &datastream.StreamDestinationConfigBigqueryDestinationConfigSingleTargetDatasetArgs{
DatasetId: postgres.ID(),
},
},
},
BackfillAll: nil,
})
if err != nil {
return err
}
// Database inside the source instance.
_, err = sql.NewDatabase(ctx, "db", &sql.DatabaseArgs{
Instance: instance.Name,
})
if err != nil {
return err
}
return nil
})
}
// Generated Pulumi program: streams a Cloud SQL MySQL instance into a single
// target BigQuery dataset via Datastream.
//
// Fixes over the generated doc snippet:
//  - added the missing import for
//    DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs (the class
//    was referenced but never imported);
//  - the zero-argument builder calls .bigqueryProfile(), .mysqlSourceConfig()
//    and .backfillAll() now pass empty Args objects (those Args classes were
//    imported but unused, which is what the generator intended).
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.bigquery.Dataset;
import com.pulumi.gcp.bigquery.DatasetArgs;
import com.pulumi.gcp.datastream.ConnectionProfile;
import com.pulumi.gcp.datastream.ConnectionProfileArgs;
import com.pulumi.gcp.datastream.inputs.ConnectionProfileBigqueryProfileArgs;
import com.pulumi.gcp.sql.DatabaseInstance;
import com.pulumi.gcp.sql.DatabaseInstanceArgs;
import com.pulumi.gcp.sql.inputs.DatabaseInstanceSettingsArgs;
import com.pulumi.gcp.sql.inputs.DatabaseInstanceSettingsBackupConfigurationArgs;
import com.pulumi.gcp.sql.inputs.DatabaseInstanceSettingsIpConfigurationArgs;
import com.pulumi.gcp.sql.inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs;
import com.pulumi.random.RandomPassword;
import com.pulumi.random.RandomPasswordArgs;
import com.pulumi.gcp.sql.User;
import com.pulumi.gcp.sql.UserArgs;
import com.pulumi.gcp.datastream.inputs.ConnectionProfileMysqlProfileArgs;
import com.pulumi.gcp.datastream.Stream;
import com.pulumi.gcp.datastream.StreamArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigMysqlSourceConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigBigqueryDestinationConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigBigqueryDestinationConfigSingleTargetDatasetArgs;
import com.pulumi.gcp.datastream.inputs.StreamBackfillAllArgs;
import com.pulumi.gcp.sql.Database;
import com.pulumi.gcp.sql.DatabaseArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // BigQuery dataset used as the single target dataset for the stream.
        var postgres = new Dataset("postgres", DatasetArgs.builder()
            .datasetId("postgres")
            .friendlyName("postgres")
            .description("Database of postgres")
            .location("us-central1")
            .build());
        // BigQuery destination profile; the profile itself needs no settings.
        var destinationConnectionProfile2 = new ConnectionProfile("destinationConnectionProfile2", ConnectionProfileArgs.builder()
            .displayName("Connection profile")
            .location("us-central1")
            .connectionProfileId("dest-profile")
            .bigqueryProfile(ConnectionProfileBigqueryProfileArgs.builder().build())
            .build());
        // Cloud SQL MySQL source instance; the authorized networks are the
        // example Datastream public IPs.
        var instance = new DatabaseInstance("instance", DatabaseInstanceArgs.builder()
            .databaseVersion("MYSQL_8_0")
            .region("us-central1")
            .settings(DatabaseInstanceSettingsArgs.builder()
                .tier("db-f1-micro")
                .backupConfiguration(DatabaseInstanceSettingsBackupConfigurationArgs.builder()
                    .enabled(true)
                    // Binary logging is required for MySQL CDC.
                    .binaryLogEnabled(true)
                    .build())
                .ipConfiguration(DatabaseInstanceSettingsIpConfigurationArgs.builder()
                    .authorizedNetworks(
                        DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
                            .value("34.71.242.81")
                            .build(),
                        DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
                            .value("34.72.28.29")
                            .build(),
                        DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
                            .value("34.67.6.157")
                            .build(),
                        DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
                            .value("34.67.234.134")
                            .build(),
                        DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
                            .value("34.72.239.218")
                            .build())
                    .build())
                .build())
            .deletionProtection(false)
            .build());
        // Random password for the database user below.
        var pwd = new RandomPassword("pwd", RandomPasswordArgs.builder()
            .length(16)
            .special(false)
            .build());
        // Database user Datastream connects as.
        var user = new User("user", UserArgs.builder()
            .instance(instance.name())
            .host("%")
            .password(pwd.result())
            .build());
        // MySQL source connection profile pointing at the Cloud SQL instance.
        var sourceConnectionProfile = new ConnectionProfile("sourceConnectionProfile", ConnectionProfileArgs.builder()
            .displayName("Source connection profile")
            .location("us-central1")
            .connectionProfileId("source-profile")
            .mysqlProfile(ConnectionProfileMysqlProfileArgs.builder()
                .hostname(instance.publicIpAddress())
                .username(user.name())
                .password(user.password())
                .build())
            .build());
        // The stream itself, writing into the single target dataset above.
        var default_ = new Stream("default", StreamArgs.builder()
            .displayName("postgres to bigQuery")
            .location("us-central1")
            .streamId("postgres-bigquery")
            .sourceConfig(StreamSourceConfigArgs.builder()
                .sourceConnectionProfile(sourceConnectionProfile.id())
                .mysqlSourceConfig(StreamSourceConfigMysqlSourceConfigArgs.builder().build())
                .build())
            .destinationConfig(StreamDestinationConfigArgs.builder()
                .destinationConnectionProfile(destinationConnectionProfile2.id())
                .bigqueryDestinationConfig(StreamDestinationConfigBigqueryDestinationConfigArgs.builder()
                    .dataFreshness("900s")
                    .singleTargetDataset(StreamDestinationConfigBigqueryDestinationConfigSingleTargetDatasetArgs.builder()
                        .datasetId(postgres.id())
                        .build())
                    .build())
                .build())
            // Empty args object selects the backfill-all policy.
            .backfillAll(StreamBackfillAllArgs.builder().build())
            .build());
        // Database inside the source instance.
        var db = new Database("db", DatabaseArgs.builder()
            .instance(instance.name())
            .build());
    }
}
# Pulumi example program: streams a Cloud SQL MySQL instance into a single
# target BigQuery dataset via Datastream.
import pulumi
import pulumi_gcp as gcp
import pulumi_random as random
# BigQuery dataset used as the single target dataset for the stream.
postgres = gcp.bigquery.Dataset("postgres",
dataset_id="postgres",
friendly_name="postgres",
description="Database of postgres",
location="us-central1")
# BigQuery destination profile; the profile itself needs no settings.
destination_connection_profile2 = gcp.datastream.ConnectionProfile("destinationConnectionProfile2",
display_name="Connection profile",
location="us-central1",
connection_profile_id="dest-profile",
bigquery_profile=gcp.datastream.ConnectionProfileBigqueryProfileArgs())
# Cloud SQL MySQL source instance; the authorized networks are the example
# Datastream public IPs.
instance = gcp.sql.DatabaseInstance("instance",
database_version="MYSQL_8_0",
region="us-central1",
settings=gcp.sql.DatabaseInstanceSettingsArgs(
tier="db-f1-micro",
backup_configuration=gcp.sql.DatabaseInstanceSettingsBackupConfigurationArgs(
enabled=True,
# Binary logging is required for MySQL CDC.
binary_log_enabled=True,
),
ip_configuration=gcp.sql.DatabaseInstanceSettingsIpConfigurationArgs(
authorized_networks=[
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.71.242.81",
),
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.72.28.29",
),
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.67.6.157",
),
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.67.234.134",
),
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.72.239.218",
),
],
),
),
deletion_protection=False)
# Random password for the database user below.
pwd = random.RandomPassword("pwd",
length=16,
special=False)
# Database user Datastream connects as.
user = gcp.sql.User("user",
instance=instance.name,
host="%",
password=pwd.result)
# MySQL source connection profile pointing at the Cloud SQL instance.
source_connection_profile = gcp.datastream.ConnectionProfile("sourceConnectionProfile",
display_name="Source connection profile",
location="us-central1",
connection_profile_id="source-profile",
mysql_profile=gcp.datastream.ConnectionProfileMysqlProfileArgs(
hostname=instance.public_ip_address,
username=user.name,
password=user.password,
))
# The stream itself, writing into the single target dataset above.
default = gcp.datastream.Stream("default",
display_name="postgres to bigQuery",
location="us-central1",
stream_id="postgres-bigquery",
source_config=gcp.datastream.StreamSourceConfigArgs(
source_connection_profile=source_connection_profile.id,
mysql_source_config=gcp.datastream.StreamSourceConfigMysqlSourceConfigArgs(),
),
destination_config=gcp.datastream.StreamDestinationConfigArgs(
destination_connection_profile=destination_connection_profile2.id,
bigquery_destination_config=gcp.datastream.StreamDestinationConfigBigqueryDestinationConfigArgs(
data_freshness="900s",
single_target_dataset=gcp.datastream.StreamDestinationConfigBigqueryDestinationConfigSingleTargetDatasetArgs(
dataset_id=postgres.id,
),
),
),
# Empty args object selects the backfill-all policy.
backfill_all=gcp.datastream.StreamBackfillAllArgs())
# Database inside the source instance.
db = gcp.sql.Database("db", instance=instance.name)
// Pulumi example program: streams a Cloud SQL MySQL instance into a single
// target BigQuery dataset via Datastream.
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
import * as random from "@pulumi/random";
// BigQuery dataset used as the single target dataset for the stream.
const postgres = new gcp.bigquery.Dataset("postgres", {
datasetId: "postgres",
friendlyName: "postgres",
description: "Database of postgres",
location: "us-central1",
});
// BigQuery destination profile; the profile itself needs no settings.
const destinationConnectionProfile2 = new gcp.datastream.ConnectionProfile("destinationConnectionProfile2", {
displayName: "Connection profile",
location: "us-central1",
connectionProfileId: "dest-profile",
bigqueryProfile: {},
});
// Cloud SQL MySQL source instance; the authorized networks are the example
// Datastream public IPs.
const instance = new gcp.sql.DatabaseInstance("instance", {
databaseVersion: "MYSQL_8_0",
region: "us-central1",
settings: {
tier: "db-f1-micro",
backupConfiguration: {
enabled: true,
// Binary logging is required for MySQL CDC.
binaryLogEnabled: true,
},
ipConfiguration: {
authorizedNetworks: [
{
value: "34.71.242.81",
},
{
value: "34.72.28.29",
},
{
value: "34.67.6.157",
},
{
value: "34.67.234.134",
},
{
value: "34.72.239.218",
},
],
},
},
deletionProtection: false,
});
// Random password for the database user below.
const pwd = new random.RandomPassword("pwd", {
length: 16,
special: false,
});
// Database user Datastream connects as.
const user = new gcp.sql.User("user", {
instance: instance.name,
host: "%",
password: pwd.result,
});
// MySQL source connection profile pointing at the Cloud SQL instance.
const sourceConnectionProfile = new gcp.datastream.ConnectionProfile("sourceConnectionProfile", {
displayName: "Source connection profile",
location: "us-central1",
connectionProfileId: "source-profile",
mysqlProfile: {
hostname: instance.publicIpAddress,
username: user.name,
password: user.password,
},
});
// The stream itself, writing into the single target dataset above.
const _default = new gcp.datastream.Stream("default", {
displayName: "postgres to bigQuery",
location: "us-central1",
streamId: "postgres-bigquery",
sourceConfig: {
sourceConnectionProfile: sourceConnectionProfile.id,
mysqlSourceConfig: {},
},
destinationConfig: {
destinationConnectionProfile: destinationConnectionProfile2.id,
bigqueryDestinationConfig: {
dataFreshness: "900s",
singleTargetDataset: {
datasetId: postgres.id,
},
},
},
// Empty object selects the backfill-all policy.
backfillAll: {},
});
// Database inside the source instance.
const db = new gcp.sql.Database("db", {instance: instance.name});
# Pulumi YAML example: streams a Cloud SQL MySQL instance into a single
# target BigQuery dataset via Datastream.
# NOTE(review): indentation was flattened by the documentation extraction;
# the key nesting below reflects the intended structure.
resources:
# BigQuery dataset used as the single target dataset for the stream.
postgres:
type: gcp:bigquery:Dataset
properties:
datasetId: postgres
friendlyName: postgres
description: Database of postgres
location: us-central1
# The stream itself, writing into the single target dataset above.
default:
type: gcp:datastream:Stream
properties:
displayName: postgres to bigQuery
location: us-central1
streamId: postgres-bigquery
sourceConfig:
sourceConnectionProfile: ${sourceConnectionProfile.id}
mysqlSourceConfig: {}
destinationConfig:
destinationConnectionProfile: ${destinationConnectionProfile2.id}
bigqueryDestinationConfig:
dataFreshness: 900s
singleTargetDataset:
datasetId: ${postgres.id}
# Empty mapping selects the backfill-all policy.
backfillAll: {}
# BigQuery destination profile; the profile itself needs no settings.
destinationConnectionProfile2:
type: gcp:datastream:ConnectionProfile
properties:
displayName: Connection profile
location: us-central1
connectionProfileId: dest-profile
bigqueryProfile: {}
# Cloud SQL MySQL source instance; the authorized networks are the example
# Datastream public IPs.
instance:
type: gcp:sql:DatabaseInstance
properties:
databaseVersion: MYSQL_8_0
region: us-central1
settings:
tier: db-f1-micro
backupConfiguration:
enabled: true
binaryLogEnabled: true
ipConfiguration:
authorizedNetworks:
- value: 34.71.242.81
- value: 34.72.28.29
- value: 34.67.6.157
- value: 34.67.234.134
- value: 34.72.239.218
deletionProtection: false
# Database inside the source instance.
db:
type: gcp:sql:Database
properties:
instance: ${instance.name}
# Random password for the database user below.
pwd:
type: random:RandomPassword
properties:
length: 16
special: false
# Database user Datastream connects as.
user:
type: gcp:sql:User
properties:
instance: ${instance.name}
host: '%'
password: ${pwd.result}
# MySQL source connection profile pointing at the Cloud SQL instance.
sourceConnectionProfile:
type: gcp:datastream:ConnectionProfile
properties:
displayName: Source connection profile
location: us-central1
connectionProfileId: source-profile
mysqlProfile:
hostname: ${instance.publicIpAddress}
username: ${user.name}
password: ${user.password}
Datastream Stream Bigquery
// Pulumi example program (top-level statements): streams a Cloud SQL MySQL
// instance into BigQuery using source-hierarchy datasets encrypted with a
// customer-managed KMS key.
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Gcp = Pulumi.Gcp;
using Random = Pulumi.Random;

return await Deployment.RunAsync(() =>
{
var project = Gcp.Organizations.GetProject.Invoke();
// Cloud SQL MySQL source instance; the authorized networks are the example
// Datastream public IPs.
var instance = new Gcp.Sql.DatabaseInstance("instance", new()
{
DatabaseVersion = "MYSQL_8_0",
Region = "us-central1",
Settings = new Gcp.Sql.Inputs.DatabaseInstanceSettingsArgs
{
Tier = "db-f1-micro",
BackupConfiguration = new Gcp.Sql.Inputs.DatabaseInstanceSettingsBackupConfigurationArgs
{
Enabled = true,
// Binary logging is required for MySQL CDC.
BinaryLogEnabled = true,
},
IpConfiguration = new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationArgs
{
AuthorizedNetworks = new[]
{
new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
{
Value = "34.71.242.81",
},
new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
{
Value = "34.72.28.29",
},
new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
{
Value = "34.67.6.157",
},
new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
{
Value = "34.67.234.134",
},
new Gcp.Sql.Inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs
{
Value = "34.72.239.218",
},
},
},
},
DeletionProtection = true,
});
// Database inside the source instance.
var db = new Gcp.Sql.Database("db", new()
{
Instance = instance.Name,
});
// Random password for the database user below.
var pwd = new Random.RandomPassword("pwd", new()
{
Length = 16,
Special = false,
});
// Database user Datastream connects as.
var user = new Gcp.Sql.User("user", new()
{
Instance = instance.Name,
Host = "%",
Password = pwd.Result,
});
// MySQL source connection profile pointing at the Cloud SQL instance.
var sourceConnectionProfile = new Gcp.Datastream.ConnectionProfile("sourceConnectionProfile", new()
{
DisplayName = "Source connection profile",
Location = "us-central1",
ConnectionProfileId = "source-profile",
MysqlProfile = new Gcp.Datastream.Inputs.ConnectionProfileMysqlProfileArgs
{
Hostname = instance.PublicIpAddress,
Username = user.Name,
Password = user.Password,
},
});
// Grant the BigQuery default service account access to the KMS key so the
// destination datasets can be CMEK-encrypted.
var bqSa = Gcp.BigQuery.GetDefaultServiceAccount.Invoke();
var bigqueryKeyUser = new Gcp.Kms.CryptoKeyIAMMember("bigqueryKeyUser", new()
{
CryptoKeyId = "bigquery-kms-name",
Role = "roles/cloudkms.cryptoKeyEncrypterDecrypter",
Member = $"serviceAccount:{bqSa.Apply(getDefaultServiceAccountResult => getDefaultServiceAccountResult.Email)}",
});
// BigQuery destination profile; null profile body means no settings needed.
var destinationConnectionProfile = new Gcp.Datastream.ConnectionProfile("destinationConnectionProfile", new()
{
DisplayName = "Connection profile",
Location = "us-central1",
ConnectionProfileId = "destination-profile",
BigqueryProfile = null,
});
// The stream itself; DependsOn ensures the KMS grant exists before the
// stream tries to create encrypted datasets.
var @default = new Gcp.Datastream.Stream("default", new()
{
StreamId = "my-stream",
Location = "us-central1",
DisplayName = "my stream",
SourceConfig = new Gcp.Datastream.Inputs.StreamSourceConfigArgs
{
SourceConnectionProfile = sourceConnectionProfile.Id,
MysqlSourceConfig = null,
},
DestinationConfig = new Gcp.Datastream.Inputs.StreamDestinationConfigArgs
{
DestinationConnectionProfile = destinationConnectionProfile.Id,
BigqueryDestinationConfig = new Gcp.Datastream.Inputs.StreamDestinationConfigBigqueryDestinationConfigArgs
{
SourceHierarchyDatasets = new Gcp.Datastream.Inputs.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs
{
DatasetTemplate = new Gcp.Datastream.Inputs.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs
{
Location = "us-central1",
KmsKeyName = "bigquery-kms-name",
},
},
},
},
BackfillNone = null,
}, new CustomResourceOptions
{
DependsOn = new[]
{
bigqueryKeyUser,
},
});
});
package main
// Example: stream data from a Cloud SQL (MySQL) instance into BigQuery with
// GCP Datastream. The BigQuery destination dataset is encrypted with the
// Cloud KMS key "bigquery-kms-name", so the BigQuery default service account
// is first granted encrypt/decrypt on that key.
import (
"fmt"
"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/bigquery"
"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/datastream"
"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/kms"
"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/organizations"
"github.com/pulumi/pulumi-gcp/sdk/v6/go/gcp/sql"
"github.com/pulumi/pulumi-random/sdk/v4/go/random"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
// Current-project lookup; the result is unused here but kept for
// parity with the other language examples on this page.
_, err := organizations.LookupProject(ctx, nil, nil)
if err != nil {
return err
}
// MySQL 8.0 source instance. Binary logging must be enabled for
// Datastream change-data-capture; the authorized networks below are
// presumably Datastream's public egress IPs for the region — confirm
// against the Datastream IP allowlist documentation.
instance, err := sql.NewDatabaseInstance(ctx, "instance", &sql.DatabaseInstanceArgs{
DatabaseVersion: pulumi.String("MYSQL_8_0"),
Region: pulumi.String("us-central1"),
Settings: &sql.DatabaseInstanceSettingsArgs{
Tier: pulumi.String("db-f1-micro"),
BackupConfiguration: &sql.DatabaseInstanceSettingsBackupConfigurationArgs{
Enabled: pulumi.Bool(true),
BinaryLogEnabled: pulumi.Bool(true),
},
IpConfiguration: &sql.DatabaseInstanceSettingsIpConfigurationArgs{
AuthorizedNetworks: sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArray{
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.71.242.81"),
},
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.72.28.29"),
},
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.67.6.157"),
},
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.67.234.134"),
},
&sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs{
Value: pulumi.String("34.72.239.218"),
},
},
},
},
DeletionProtection: pulumi.Bool(true),
})
if err != nil {
return err
}
// Database to replicate from the source instance.
_, err = sql.NewDatabase(ctx, "db", &sql.DatabaseArgs{
Instance: instance.Name,
})
if err != nil {
return err
}
// Random password for the replication user (non-special chars only).
pwd, err := random.NewRandomPassword(ctx, "pwd", &random.RandomPasswordArgs{
Length: pulumi.Int(16),
Special: pulumi.Bool(false),
})
if err != nil {
return err
}
// MySQL user Datastream connects as; host "%" allows any client host.
user, err := sql.NewUser(ctx, "user", &sql.UserArgs{
Instance: instance.Name,
Host: pulumi.String("%"),
Password: pwd.Result,
})
if err != nil {
return err
}
// Datastream connection profile pointing at the MySQL source.
sourceConnectionProfile, err := datastream.NewConnectionProfile(ctx, "sourceConnectionProfile", &datastream.ConnectionProfileArgs{
DisplayName: pulumi.String("Source connection profile"),
Location: pulumi.String("us-central1"),
ConnectionProfileId: pulumi.String("source-profile"),
MysqlProfile: &datastream.ConnectionProfileMysqlProfileArgs{
Hostname: instance.PublicIpAddress,
Username: user.Name,
Password: user.Password,
},
})
if err != nil {
return err
}
// BigQuery's default service account must be able to use the KMS key
// that encrypts the destination dataset.
bqSa, err := bigquery.GetDefaultServiceAccount(ctx, nil, nil)
if err != nil {
return err
}
bigqueryKeyUser, err := kms.NewCryptoKeyIAMMember(ctx, "bigqueryKeyUser", &kms.CryptoKeyIAMMemberArgs{
CryptoKeyId: pulumi.String("bigquery-kms-name"),
Role: pulumi.String("roles/cloudkms.cryptoKeyEncrypterDecrypter"),
Member: pulumi.String(fmt.Sprintf("serviceAccount:%v", bqSa.Email)),
})
if err != nil {
return err
}
// Destination profile: an empty BigqueryProfile selects BigQuery as the
// target with no extra settings.
destinationConnectionProfile, err := datastream.NewConnectionProfile(ctx, "destinationConnectionProfile", &datastream.ConnectionProfileArgs{
DisplayName: pulumi.String("Connection profile"),
Location: pulumi.String("us-central1"),
ConnectionProfileId: pulumi.String("destination-profile"),
BigqueryProfile: nil,
})
if err != nil {
return err
}
// The stream itself. DependsOn ensures the KMS grant exists before the
// stream tries to create the CMEK-encrypted dataset; BackfillNone
// disables automatic backfill of existing data.
_, err = datastream.NewStream(ctx, "default", &datastream.StreamArgs{
StreamId: pulumi.String("my-stream"),
Location: pulumi.String("us-central1"),
DisplayName: pulumi.String("my stream"),
SourceConfig: &datastream.StreamSourceConfigArgs{
SourceConnectionProfile: sourceConnectionProfile.ID(),
MysqlSourceConfig: nil,
},
DestinationConfig: &datastream.StreamDestinationConfigArgs{
DestinationConnectionProfile: destinationConnectionProfile.ID(),
BigqueryDestinationConfig: &datastream.StreamDestinationConfigBigqueryDestinationConfigArgs{
SourceHierarchyDatasets: &datastream.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs{
DatasetTemplate: &datastream.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs{
Location: pulumi.String("us-central1"),
KmsKeyName: pulumi.String("bigquery-kms-name"),
},
},
},
},
BackfillNone: nil,
}, pulumi.DependsOn([]pulumi.Resource{
bigqueryKeyUser,
}))
if err != nil {
return err
}
return nil
})
}
package generated_program;
// Example: stream data from a Cloud SQL (MySQL) instance into BigQuery with
// GCP Datastream, encrypting the destination dataset with a Cloud KMS key.
//
// Fixes over the generated snippet:
//  - DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs was used
//    below but missing from the import list (compile error).
//  - bigqueryProfile()/mysqlSourceConfig()/backfillNone() were called with no
//    argument; the builder methods take an Args value, so explicit empty Args
//    are passed (matching the Python/Go/TypeScript examples).
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.gcp.organizations.OrganizationsFunctions;
import com.pulumi.gcp.organizations.inputs.GetProjectArgs;
import com.pulumi.gcp.sql.DatabaseInstance;
import com.pulumi.gcp.sql.DatabaseInstanceArgs;
import com.pulumi.gcp.sql.inputs.DatabaseInstanceSettingsArgs;
import com.pulumi.gcp.sql.inputs.DatabaseInstanceSettingsBackupConfigurationArgs;
import com.pulumi.gcp.sql.inputs.DatabaseInstanceSettingsIpConfigurationArgs;
import com.pulumi.gcp.sql.inputs.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs;
import com.pulumi.gcp.sql.Database;
import com.pulumi.gcp.sql.DatabaseArgs;
import com.pulumi.random.RandomPassword;
import com.pulumi.random.RandomPasswordArgs;
import com.pulumi.gcp.sql.User;
import com.pulumi.gcp.sql.UserArgs;
import com.pulumi.gcp.datastream.ConnectionProfile;
import com.pulumi.gcp.datastream.ConnectionProfileArgs;
import com.pulumi.gcp.datastream.inputs.ConnectionProfileMysqlProfileArgs;
import com.pulumi.gcp.bigquery.BigqueryFunctions;
import com.pulumi.gcp.bigquery.inputs.GetDefaultServiceAccountArgs;
import com.pulumi.gcp.kms.CryptoKeyIAMMember;
import com.pulumi.gcp.kms.CryptoKeyIAMMemberArgs;
import com.pulumi.gcp.datastream.inputs.ConnectionProfileBigqueryProfileArgs;
import com.pulumi.gcp.datastream.Stream;
import com.pulumi.gcp.datastream.StreamArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamSourceConfigMysqlSourceConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigBigqueryDestinationConfigArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs;
import com.pulumi.gcp.datastream.inputs.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs;
import com.pulumi.gcp.datastream.inputs.StreamBackfillNoneArgs;
import com.pulumi.resources.CustomResourceOptions;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
// Project lookup; unused, kept for parity with the other examples.
final var project = OrganizationsFunctions.getProject();
// MySQL 8.0 source. Binary logging is required for Datastream CDC; the
// authorized networks are presumably Datastream's regional egress IPs
// — confirm against the Datastream IP allowlist documentation.
var instance = new DatabaseInstance("instance", DatabaseInstanceArgs.builder()
.databaseVersion("MYSQL_8_0")
.region("us-central1")
.settings(DatabaseInstanceSettingsArgs.builder()
.tier("db-f1-micro")
.backupConfiguration(DatabaseInstanceSettingsBackupConfigurationArgs.builder()
.enabled(true)
.binaryLogEnabled(true)
.build())
.ipConfiguration(DatabaseInstanceSettingsIpConfigurationArgs.builder()
.authorizedNetworks(
DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
.value("34.71.242.81")
.build(),
DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
.value("34.72.28.29")
.build(),
DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
.value("34.67.6.157")
.build(),
DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
.value("34.67.234.134")
.build(),
DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs.builder()
.value("34.72.239.218")
.build())
.build())
.build())
.deletionProtection(true)
.build());
// Database to replicate from the source instance.
var db = new Database("db", DatabaseArgs.builder()
.instance(instance.name())
.build());
// Random password for the replication user.
var pwd = new RandomPassword("pwd", RandomPasswordArgs.builder()
.length(16)
.special(false)
.build());
// MySQL user Datastream connects as; host "%" allows any client host.
var user = new User("user", UserArgs.builder()
.instance(instance.name())
.host("%")
.password(pwd.result())
.build());
// Datastream connection profile pointing at the MySQL source.
var sourceConnectionProfile = new ConnectionProfile("sourceConnectionProfile", ConnectionProfileArgs.builder()
.displayName("Source connection profile")
.location("us-central1")
.connectionProfileId("source-profile")
.mysqlProfile(ConnectionProfileMysqlProfileArgs.builder()
.hostname(instance.publicIpAddress())
.username(user.name())
.password(user.password())
.build())
.build());
// Grant BigQuery's default service account use of the CMEK key that
// will encrypt the destination dataset.
final var bqSa = BigqueryFunctions.getDefaultServiceAccount();
var bigqueryKeyUser = new CryptoKeyIAMMember("bigqueryKeyUser", CryptoKeyIAMMemberArgs.builder()
.cryptoKeyId("bigquery-kms-name")
.role("roles/cloudkms.cryptoKeyEncrypterDecrypter")
.member(String.format("serviceAccount:%s", bqSa.applyValue(getDefaultServiceAccountResult -> getDefaultServiceAccountResult.email())))
.build());
// Destination profile: an empty BigqueryProfile selects BigQuery as
// the target with no extra settings.
var destinationConnectionProfile = new ConnectionProfile("destinationConnectionProfile", ConnectionProfileArgs.builder()
.displayName("Connection profile")
.location("us-central1")
.connectionProfileId("destination-profile")
.bigqueryProfile(ConnectionProfileBigqueryProfileArgs.builder().build())
.build());
// The stream. dependsOn ensures the KMS grant exists before the stream
// creates the CMEK-encrypted dataset; an empty StreamBackfillNoneArgs
// disables automatic backfill of existing data.
var default_ = new Stream("default", StreamArgs.builder()
.streamId("my-stream")
.location("us-central1")
.displayName("my stream")
.sourceConfig(StreamSourceConfigArgs.builder()
.sourceConnectionProfile(sourceConnectionProfile.id())
.mysqlSourceConfig(StreamSourceConfigMysqlSourceConfigArgs.builder().build())
.build())
.destinationConfig(StreamDestinationConfigArgs.builder()
.destinationConnectionProfile(destinationConnectionProfile.id())
.bigqueryDestinationConfig(StreamDestinationConfigBigqueryDestinationConfigArgs.builder()
.sourceHierarchyDatasets(StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs.builder()
.datasetTemplate(StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs.builder()
.location("us-central1")
.kmsKeyName("bigquery-kms-name")
.build())
.build())
.build())
.build())
.backfillNone(StreamBackfillNoneArgs.builder().build())
.build(), CustomResourceOptions.builder()
.dependsOn(bigqueryKeyUser)
.build());
}
}
# Example: stream data from a Cloud SQL (MySQL) instance into BigQuery with
# GCP Datastream; the destination dataset is encrypted with the Cloud KMS key
# "bigquery-kms-name", which the BigQuery service account is granted first.
import pulumi
import pulumi_gcp as gcp
import pulumi_random as random
# Project lookup; unused, kept for parity with the other language examples.
project = gcp.organizations.get_project()
# MySQL 8.0 source. Binary logging is required for Datastream CDC; the
# authorized networks are presumably Datastream's regional egress IPs —
# confirm against the Datastream IP allowlist documentation.
instance = gcp.sql.DatabaseInstance("instance",
database_version="MYSQL_8_0",
region="us-central1",
settings=gcp.sql.DatabaseInstanceSettingsArgs(
tier="db-f1-micro",
backup_configuration=gcp.sql.DatabaseInstanceSettingsBackupConfigurationArgs(
enabled=True,
binary_log_enabled=True,
),
ip_configuration=gcp.sql.DatabaseInstanceSettingsIpConfigurationArgs(
authorized_networks=[
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.71.242.81",
),
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.72.28.29",
),
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.67.6.157",
),
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.67.234.134",
),
gcp.sql.DatabaseInstanceSettingsIpConfigurationAuthorizedNetworkArgs(
value="34.72.239.218",
),
],
),
),
deletion_protection=True)
# Database to replicate from the source instance.
db = gcp.sql.Database("db", instance=instance.name)
# Random password for the replication user.
pwd = random.RandomPassword("pwd",
length=16,
special=False)
# MySQL user Datastream connects as; host "%" allows any client host.
user = gcp.sql.User("user",
instance=instance.name,
host="%",
password=pwd.result)
# Datastream connection profile pointing at the MySQL source.
source_connection_profile = gcp.datastream.ConnectionProfile("sourceConnectionProfile",
display_name="Source connection profile",
location="us-central1",
connection_profile_id="source-profile",
mysql_profile=gcp.datastream.ConnectionProfileMysqlProfileArgs(
hostname=instance.public_ip_address,
username=user.name,
password=user.password,
))
# Grant BigQuery's default service account use of the CMEK key that will
# encrypt the destination dataset.
bq_sa = gcp.bigquery.get_default_service_account()
bigquery_key_user = gcp.kms.CryptoKeyIAMMember("bigqueryKeyUser",
crypto_key_id="bigquery-kms-name",
role="roles/cloudkms.cryptoKeyEncrypterDecrypter",
member=f"serviceAccount:{bq_sa.email}")
# Destination profile: an empty BigqueryProfileArgs selects BigQuery as the
# target with no extra settings.
destination_connection_profile = gcp.datastream.ConnectionProfile("destinationConnectionProfile",
display_name="Connection profile",
location="us-central1",
connection_profile_id="destination-profile",
bigquery_profile=gcp.datastream.ConnectionProfileBigqueryProfileArgs())
# The stream. depends_on ensures the KMS grant exists before the stream
# creates the CMEK-encrypted dataset; an empty StreamBackfillNoneArgs
# disables automatic backfill of existing data.
default = gcp.datastream.Stream("default",
stream_id="my-stream",
location="us-central1",
display_name="my stream",
source_config=gcp.datastream.StreamSourceConfigArgs(
source_connection_profile=source_connection_profile.id,
mysql_source_config=gcp.datastream.StreamSourceConfigMysqlSourceConfigArgs(),
),
destination_config=gcp.datastream.StreamDestinationConfigArgs(
destination_connection_profile=destination_connection_profile.id,
bigquery_destination_config=gcp.datastream.StreamDestinationConfigBigqueryDestinationConfigArgs(
source_hierarchy_datasets=gcp.datastream.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs(
dataset_template=gcp.datastream.StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs(
location="us-central1",
kms_key_name="bigquery-kms-name",
),
),
),
),
backfill_none=gcp.datastream.StreamBackfillNoneArgs(),
opts=pulumi.ResourceOptions(depends_on=[bigquery_key_user]))
// Example: stream data from a Cloud SQL (MySQL) instance into BigQuery with
// GCP Datastream; the destination dataset is encrypted with the Cloud KMS
// key "bigquery-kms-name", which the BigQuery service account is granted
// access to first.
import * as pulumi from "@pulumi/pulumi";
import * as gcp from "@pulumi/gcp";
import * as random from "@pulumi/random";
// Project lookup; unused, kept for parity with the other language examples.
const project = gcp.organizations.getProject({});
// MySQL 8.0 source. Binary logging is required for Datastream CDC; the
// authorized networks are presumably Datastream's regional egress IPs —
// confirm against the Datastream IP allowlist documentation.
const instance = new gcp.sql.DatabaseInstance("instance", {
databaseVersion: "MYSQL_8_0",
region: "us-central1",
settings: {
tier: "db-f1-micro",
backupConfiguration: {
enabled: true,
binaryLogEnabled: true,
},
ipConfiguration: {
authorizedNetworks: [
{
value: "34.71.242.81",
},
{
value: "34.72.28.29",
},
{
value: "34.67.6.157",
},
{
value: "34.67.234.134",
},
{
value: "34.72.239.218",
},
],
},
},
deletionProtection: true,
});
// Database to replicate from the source instance.
const db = new gcp.sql.Database("db", {instance: instance.name});
// Random password for the replication user.
const pwd = new random.RandomPassword("pwd", {
length: 16,
special: false,
});
// MySQL user Datastream connects as; host "%" allows any client host.
const user = new gcp.sql.User("user", {
instance: instance.name,
host: "%",
password: pwd.result,
});
// Datastream connection profile pointing at the MySQL source.
const sourceConnectionProfile = new gcp.datastream.ConnectionProfile("sourceConnectionProfile", {
displayName: "Source connection profile",
location: "us-central1",
connectionProfileId: "source-profile",
mysqlProfile: {
hostname: instance.publicIpAddress,
username: user.name,
password: user.password,
},
});
// Grant BigQuery's default service account use of the CMEK key that will
// encrypt the destination dataset.
const bqSa = gcp.bigquery.getDefaultServiceAccount({});
const bigqueryKeyUser = new gcp.kms.CryptoKeyIAMMember("bigqueryKeyUser", {
cryptoKeyId: "bigquery-kms-name",
role: "roles/cloudkms.cryptoKeyEncrypterDecrypter",
member: bqSa.then(bqSa => `serviceAccount:${bqSa.email}`),
});
// Destination profile: an empty bigqueryProfile selects BigQuery as the
// target with no extra settings.
const destinationConnectionProfile = new gcp.datastream.ConnectionProfile("destinationConnectionProfile", {
displayName: "Connection profile",
location: "us-central1",
connectionProfileId: "destination-profile",
bigqueryProfile: {},
});
// The stream. dependsOn ensures the KMS grant exists before the stream
// creates the CMEK-encrypted dataset; an empty backfillNone disables
// automatic backfill of existing data.
const _default = new gcp.datastream.Stream("default", {
streamId: "my-stream",
location: "us-central1",
displayName: "my stream",
sourceConfig: {
sourceConnectionProfile: sourceConnectionProfile.id,
mysqlSourceConfig: {},
},
destinationConfig: {
destinationConnectionProfile: destinationConnectionProfile.id,
bigqueryDestinationConfig: {
sourceHierarchyDatasets: {
datasetTemplate: {
location: "us-central1",
kmsKeyName: "bigquery-kms-name",
},
},
},
},
backfillNone: {},
}, {
dependsOn: [bigqueryKeyUser],
});
# Pulumi YAML example: stream data from a Cloud SQL (MySQL) instance into
# BigQuery with GCP Datastream; the destination dataset is encrypted with
# the Cloud KMS key "bigquery-kms-name".
resources:
# MySQL 8.0 source. Binary logging is required for Datastream CDC; the
# authorized networks are presumably Datastream's regional egress IPs —
# confirm against the Datastream IP allowlist documentation.
instance:
type: gcp:sql:DatabaseInstance
properties:
databaseVersion: MYSQL_8_0
region: us-central1
settings:
tier: db-f1-micro
backupConfiguration:
enabled: true
binaryLogEnabled: true
ipConfiguration:
authorizedNetworks:
- value: 34.71.242.81
- value: 34.72.28.29
- value: 34.67.6.157
- value: 34.67.234.134
- value: 34.72.239.218
deletionProtection: true
# Database to replicate from the source instance.
db:
type: gcp:sql:Database
properties:
instance: ${instance.name}
# Random password for the replication user.
pwd:
type: random:RandomPassword
properties:
length: 16
special: false
# MySQL user Datastream connects as; host "%" allows any client host.
user:
type: gcp:sql:User
properties:
instance: ${instance.name}
host: '%'
password: ${pwd.result}
# Datastream connection profile pointing at the MySQL source.
sourceConnectionProfile:
type: gcp:datastream:ConnectionProfile
properties:
displayName: Source connection profile
location: us-central1
connectionProfileId: source-profile
mysqlProfile:
hostname: ${instance.publicIpAddress}
username: ${user.name}
password: ${user.password}
# Grant BigQuery's default service account use of the CMEK key that will
# encrypt the destination dataset.
bigqueryKeyUser:
type: gcp:kms:CryptoKeyIAMMember
properties:
cryptoKeyId: bigquery-kms-name
role: roles/cloudkms.cryptoKeyEncrypterDecrypter
member: serviceAccount:${bqSa.email}
# Destination profile: an empty bigqueryProfile selects BigQuery as the
# target with no extra settings.
destinationConnectionProfile:
type: gcp:datastream:ConnectionProfile
properties:
displayName: Connection profile
location: us-central1
connectionProfileId: destination-profile
bigqueryProfile: {}
# The stream. The depends-on option ensures the KMS grant exists before
# the stream creates the CMEK-encrypted dataset; backfillNone disables
# automatic backfill of existing data.
default:
type: gcp:datastream:Stream
properties:
streamId: my-stream
location: us-central1
displayName: my stream
sourceConfig:
sourceConnectionProfile: ${sourceConnectionProfile.id}
mysqlSourceConfig: {}
destinationConfig:
destinationConnectionProfile: ${destinationConnectionProfile.id}
bigqueryDestinationConfig:
sourceHierarchyDatasets:
datasetTemplate:
location: us-central1
kmsKeyName: bigquery-kms-name
backfillNone: {}
options:
# NOTE(review): Pulumi YAML documents this option key as camelCase
# "dependsOn"; lowercase "dependson" here looks like a codegen artifact
# — verify against the Pulumi YAML reference before relying on it.
dependson:
- ${bigqueryKeyUser}
variables:
project:
fn::invoke:
Function: gcp:organizations:getProject
Arguments: {}
bqSa:
fn::invoke:
Function: gcp:bigquery:getDefaultServiceAccount
Arguments: {}
Create Stream Resource
new Stream(name: string, args: StreamArgs, opts?: CustomResourceOptions);
@overload
def Stream(resource_name: str,
opts: Optional[ResourceOptions] = None,
backfill_all: Optional[StreamBackfillAllArgs] = None,
backfill_none: Optional[StreamBackfillNoneArgs] = None,
customer_managed_encryption_key: Optional[str] = None,
desired_state: Optional[str] = None,
destination_config: Optional[StreamDestinationConfigArgs] = None,
display_name: Optional[str] = None,
labels: Optional[Mapping[str, str]] = None,
location: Optional[str] = None,
project: Optional[str] = None,
source_config: Optional[StreamSourceConfigArgs] = None,
stream_id: Optional[str] = None)
@overload
def Stream(resource_name: str,
args: StreamArgs,
opts: Optional[ResourceOptions] = None)
func NewStream(ctx *Context, name string, args StreamArgs, opts ...ResourceOption) (*Stream, error)
public Stream(string name, StreamArgs args, CustomResourceOptions? opts = null)
public Stream(String name, StreamArgs args)
public Stream(String name, StreamArgs args, CustomResourceOptions options)
type: gcp:datastream:Stream
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args StreamArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args StreamArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args StreamArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args StreamArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args StreamArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Stream Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
The Stream resource accepts the following input properties:
- Destination
Config StreamDestination Config Destination connection profile configuration. Structure is documented below.
- Display
Name string Display name.
- Location string
The name of the location this stream is located in.
- Source
Config StreamSource Config Source connection profile configuration. Structure is documented below.
- Stream
Id string The stream identifier.
- Backfill
All StreamBackfill All Backfill strategy to automatically backfill the Stream's objects. Specific objects can be excluded. Structure is documented below.
- Backfill
None StreamBackfill None Backfill strategy to disable automatic backfill for the Stream's objects.
- Customer
Managed stringEncryption Key A reference to a KMS encryption key. If provided, it will be used to encrypt the data. If left blank, data will be encrypted using an internal Stream-specific encryption key provisioned through KMS.
- Desired
State string Desired state of the Stream. Set this field to
RUNNING
to start the stream, and PAUSED
to pause the stream.
- Labels Dictionary<string, string>
Labels.
- Project string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- Destination
Config StreamDestination Config Args Destination connection profile configuration. Structure is documented below.
- Display
Name string Display name.
- Location string
The name of the location this stream is located in.
- Source
Config StreamSource Config Args Source connection profile configuration. Structure is documented below.
- Stream
Id string The stream identifier.
- Backfill
All StreamBackfill All Args Backfill strategy to automatically backfill the Stream's objects. Specific objects can be excluded. Structure is documented below.
- Backfill
None StreamBackfill None Args Backfill strategy to disable automatic backfill for the Stream's objects.
- Customer
Managed stringEncryption Key A reference to a KMS encryption key. If provided, it will be used to encrypt the data. If left blank, data will be encrypted using an internal Stream-specific encryption key provisioned through KMS.
- Desired
State string Desired state of the Stream. Set this field to
RUNNING
to start the stream, and PAUSED
to pause the stream.
- Labels map[string]string
Labels.
- Project string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- destination
Config StreamDestination Config Destination connection profile configuration. Structure is documented below.
- display
Name String Display name.
- location String
The name of the location this stream is located in.
- source
Config StreamSource Config Source connection profile configuration. Structure is documented below.
- stream
Id String The stream identifier.
- backfill
All StreamBackfill All Backfill strategy to automatically backfill the Stream's objects. Specific objects can be excluded. Structure is documented below.
- backfill
None StreamBackfill None Backfill strategy to disable automatic backfill for the Stream's objects.
- customer
Managed StringEncryption Key A reference to a KMS encryption key. If provided, it will be used to encrypt the data. If left blank, data will be encrypted using an internal Stream-specific encryption key provisioned through KMS.
- desired
State String Desired state of the Stream. Set this field to
RUNNING
to start the stream, and PAUSED
to pause the stream.
- labels Map<String,String>
Labels.
- project String
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- destination
Config StreamDestination Config Destination connection profile configuration. Structure is documented below.
- display
Name string Display name.
- location string
The name of the location this stream is located in.
- source
Config StreamSource Config Source connection profile configuration. Structure is documented below.
- stream
Id string The stream identifier.
- backfill
All StreamBackfill All Backfill strategy to automatically backfill the Stream's objects. Specific objects can be excluded. Structure is documented below.
- backfill
None StreamBackfill None Backfill strategy to disable automatic backfill for the Stream's objects.
- customer
Managed stringEncryption Key A reference to a KMS encryption key. If provided, it will be used to encrypt the data. If left blank, data will be encrypted using an internal Stream-specific encryption key provisioned through KMS.
- desired
State string Desired state of the Stream. Set this field to
RUNNING
to start the stream, and PAUSED
to pause the stream.
- labels {[key: string]: string}
Labels.
- project string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- destination_
config StreamDestination Config Args Destination connection profile configuration. Structure is documented below.
- display_
name str Display name.
- location str
The name of the location this stream is located in.
- source_
config StreamSource Config Args Source connection profile configuration. Structure is documented below.
- stream_
id str The stream identifier.
- backfill_
all StreamBackfill All Args Backfill strategy to automatically backfill the Stream's objects. Specific objects can be excluded. Structure is documented below.
- backfill_
none StreamBackfill None Args Backfill strategy to disable automatic backfill for the Stream's objects.
- customer_
managed_ strencryption_ key A reference to a KMS encryption key. If provided, it will be used to encrypt the data. If left blank, data will be encrypted using an internal Stream-specific encryption key provisioned through KMS.
- desired_
state str Desired state of the Stream. Set this field to
RUNNING
to start the stream, and PAUSED
to pause the stream.
- labels Mapping[str, str]
Labels.
- project str
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- destination
Config Property Map Destination connection profile configuration. Structure is documented below.
- display
Name String Display name.
- location String
The name of the location this stream is located in.
- source
Config Property Map Source connection profile configuration. Structure is documented below.
- stream
Id String The stream identifier.
- backfill
All Property Map Backfill strategy to automatically backfill the Stream's objects. Specific objects can be excluded. Structure is documented below.
- backfill
None Property Map Backfill strategy to disable automatic backfill for the Stream's objects.
- customer
Managed StringEncryption Key A reference to a KMS encryption key. If provided, it will be used to encrypt the data. If left blank, data will be encrypted using an internal Stream-specific encryption key provisioned through KMS.
- desired
State String Desired state of the Stream. Set this field to
RUNNING
to start the stream, and PAUSED
to pause the stream.
- labels Map<String>
Labels.
- project String
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
Outputs
All input properties are implicitly available as output properties. Additionally, the Stream resource produces the following output properties:
Look up Existing Stream Resource
Get an existing Stream resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: StreamState, opts?: CustomResourceOptions): Stream
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
backfill_all: Optional[StreamBackfillAllArgs] = None,
backfill_none: Optional[StreamBackfillNoneArgs] = None,
customer_managed_encryption_key: Optional[str] = None,
desired_state: Optional[str] = None,
destination_config: Optional[StreamDestinationConfigArgs] = None,
display_name: Optional[str] = None,
labels: Optional[Mapping[str, str]] = None,
location: Optional[str] = None,
name: Optional[str] = None,
project: Optional[str] = None,
source_config: Optional[StreamSourceConfigArgs] = None,
state: Optional[str] = None,
stream_id: Optional[str] = None) -> Stream
func GetStream(ctx *Context, name string, id IDInput, state *StreamState, opts ...ResourceOption) (*Stream, error)
public static Stream Get(string name, Input<string> id, StreamState? state, CustomResourceOptions? opts = null)
public static Stream get(String name, Output<String> id, StreamState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- Backfill
All StreamBackfill All Backfill strategy to automatically backfill the Stream's objects. Specific objects can be excluded. Structure is documented below.
- Backfill
None StreamBackfill None Backfill strategy to disable automatic backfill for the Stream's objects.
- Customer
Managed stringEncryption Key A reference to a KMS encryption key. If provided, it will be used to encrypt the data. If left blank, data will be encrypted using an internal Stream-specific encryption key provisioned through KMS.
- Desired
State string Desired state of the Stream. Set this field to
RUNNING
to start the stream, and PAUSED
to pause the stream.
- Destination
Config StreamDestination Config Destination connection profile configuration. Structure is documented below.
- Display
Name string Display name.
- Labels Dictionary<string, string>
Labels.
- Location string
The name of the location this stream is located in.
- Name string
The stream's name.
- Project string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- Source
Config StreamSource Config Source connection profile configuration. Structure is documented below.
- State string
The state of the stream.
- Stream
Id string The stream identifier.
- Backfill
All StreamBackfill All Args Backfill strategy to automatically backfill the Stream's objects. Specific objects can be excluded. Structure is documented below.
- Backfill
None StreamBackfill None Args Backfill strategy to disable automatic backfill for the Stream's objects.
- Customer
Managed stringEncryption Key A reference to a KMS encryption key. If provided, it will be used to encrypt the data. If left blank, data will be encrypted using an internal Stream-specific encryption key provisioned through KMS.
- Desired
State string Desired state of the Stream. Set this field to
RUNNING
to start the stream, and PAUSED
to pause the stream.
- Destination
Config StreamDestination Config Args Destination connection profile configuration. Structure is documented below.
- Display
Name string Display name.
- Labels map[string]string
Labels.
- Location string
The name of the location this stream is located in.
- Name string
The stream's name.
- Project string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- Source
Config StreamSource Config Args Source connection profile configuration. Structure is documented below.
- State string
The state of the stream.
- Stream
Id string The stream identifier.
- backfill
All StreamBackfill All Backfill strategy to automatically backfill the Stream's objects. Specific objects can be excluded. Structure is documented below.
- backfill
None StreamBackfill None Backfill strategy to disable automatic backfill for the Stream's objects.
- customer
Managed StringEncryption Key A reference to a KMS encryption key. If provided, it will be used to encrypt the data. If left blank, data will be encrypted using an internal Stream-specific encryption key provisioned through KMS.
- desired
State String Desired state of the Stream. Set this field to
RUNNING
to start the stream, and PAUSED
to pause the stream.
- destination
Config StreamDestination Config Destination connection profile configuration. Structure is documented below.
- display
Name String Display name.
- labels Map<String,String>
Labels.
- location String
The name of the location this stream is located in.
- name String
The stream's name.
- project String
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- source
Config StreamSource Config Source connection profile configuration. Structure is documented below.
- state String
The state of the stream.
- stream
Id String The stream identifier.
- backfill
All StreamBackfill All Backfill strategy to automatically backfill the Stream's objects. Specific objects can be excluded. Structure is documented below.
- backfill
None StreamBackfill None Backfill strategy to disable automatic backfill for the Stream's objects.
- customer
Managed stringEncryption Key A reference to a KMS encryption key. If provided, it will be used to encrypt the data. If left blank, data will be encrypted using an internal Stream-specific encryption key provisioned through KMS.
- desired
State string Desired state of the Stream. Set this field to
RUNNING
to start the stream, and PAUSED
to pause the stream.
- destination
Config StreamDestination Config Destination connection profile configuration. Structure is documented below.
- display
Name string Display name.
- labels {[key: string]: string}
Labels.
- location string
The name of the location this stream is located in.
- name string
The stream's name.
- project string
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- source
Config StreamSource Config Source connection profile configuration. Structure is documented below.
- state string
The state of the stream.
- stream
Id string The stream identifier.
- backfill_
all StreamBackfill All Args Backfill strategy to automatically backfill the Stream's objects. Specific objects can be excluded. Structure is documented below.
- backfill_
none StreamBackfill None Args Backfill strategy to disable automatic backfill for the Stream's objects.
- customer_
managed_ strencryption_ key A reference to a KMS encryption key. If provided, it will be used to encrypt the data. If left blank, data will be encrypted using an internal Stream-specific encryption key provisioned through KMS.
- desired_
state str Desired state of the Stream. Set this field to
RUNNING
to start the stream, and PAUSED
to pause the stream.
- destination_
config StreamDestination Config Args Destination connection profile configuration. Structure is documented below.
- display_
name str Display name.
- labels Mapping[str, str]
Labels.
- location str
The name of the location this stream is located in.
- name str
The stream's name.
- project str
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- source_
config StreamSource Config Args Source connection profile configuration. Structure is documented below.
- state str
The state of the stream.
- stream_
id str The stream identifier.
- backfill
All Property Map Backfill strategy to automatically backfill the Stream's objects. Specific objects can be excluded. Structure is documented below.
- backfill
None Property Map Backfill strategy to disable automatic backfill for the Stream's objects.
- customer
Managed StringEncryption Key A reference to a KMS encryption key. If provided, it will be used to encrypt the data. If left blank, data will be encrypted using an internal Stream-specific encryption key provisioned through KMS.
- desired
State String Desired state of the Stream. Set this field to
RUNNING
to start the stream, and PAUSED
to pause the stream.
- destination
Config Property Map Destination connection profile configuration. Structure is documented below.
- display
Name String Display name.
- labels Map<String>
Labels.
- location String
The name of the location this stream is located in.
- name String
The stream's name.
- project String
The ID of the project in which the resource belongs. If it is not provided, the provider project is used.
- source
Config Property Map Source connection profile configuration. Structure is documented below.
- state String
The state of the stream.
- stream
Id String The stream identifier.
Supporting Types
StreamBackfillAll, StreamBackfillAllArgs
- Mysql
Excluded StreamObjects Backfill All Mysql Excluded Objects MySQL data source objects to avoid backfilling. Structure is documented below.
- Oracle
Excluded StreamObjects Backfill All Oracle Excluded Objects Oracle data source objects to avoid backfilling. Structure is documented below.
- Postgresql
Excluded StreamObjects Backfill All Postgresql Excluded Objects PostgreSQL data source objects to avoid backfilling. Structure is documented below.
- Mysql
Excluded StreamObjects Backfill All Mysql Excluded Objects MySQL data source objects to avoid backfilling. Structure is documented below.
- Oracle
Excluded StreamObjects Backfill All Oracle Excluded Objects Oracle data source objects to avoid backfilling. Structure is documented below.
- Postgresql
Excluded StreamObjects Backfill All Postgresql Excluded Objects PostgreSQL data source objects to avoid backfilling. Structure is documented below.
- mysql
Excluded StreamObjects Backfill All Mysql Excluded Objects MySQL data source objects to avoid backfilling. Structure is documented below.
- oracle
Excluded StreamObjects Backfill All Oracle Excluded Objects Oracle data source objects to avoid backfilling. Structure is documented below.
- postgresql
Excluded StreamObjects Backfill All Postgresql Excluded Objects PostgreSQL data source objects to avoid backfilling. Structure is documented below.
- mysql
Excluded StreamObjects Backfill All Mysql Excluded Objects MySQL data source objects to avoid backfilling. Structure is documented below.
- oracle
Excluded StreamObjects Backfill All Oracle Excluded Objects Oracle data source objects to avoid backfilling. Structure is documented below.
- postgresql
Excluded StreamObjects Backfill All Postgresql Excluded Objects PostgreSQL data source objects to avoid backfilling. Structure is documented below.
- mysql_
excluded_ Streamobjects Backfill All Mysql Excluded Objects MySQL data source objects to avoid backfilling. Structure is documented below.
- oracle_
excluded_ Streamobjects Backfill All Oracle Excluded Objects Oracle data source objects to avoid backfilling. Structure is documented below.
- postgresql_
excluded_ Streamobjects Backfill All Postgresql Excluded Objects PostgreSQL data source objects to avoid backfilling. Structure is documented below.
- mysql
Excluded Property MapObjects MySQL data source objects to avoid backfilling. Structure is documented below.
- oracle
Excluded Property MapObjects Oracle data source objects to avoid backfilling. Structure is documented below.
- postgresql
Excluded Property MapObjects PostgreSQL data source objects to avoid backfilling. Structure is documented below.
StreamBackfillAllMysqlExcludedObjects, StreamBackfillAllMysqlExcludedObjectsArgs
- Mysql
Databases List<StreamBackfill All Mysql Excluded Objects Mysql Database> MySQL databases on the server Structure is documented below.
- Mysql
Databases []StreamBackfill All Mysql Excluded Objects Mysql Database MySQL databases on the server Structure is documented below.
- mysql
Databases List<StreamBackfill All Mysql Excluded Objects Mysql Database> MySQL databases on the server Structure is documented below.
- mysql
Databases StreamBackfill All Mysql Excluded Objects Mysql Database[] MySQL databases on the server Structure is documented below.
- mysql_
databases Sequence[StreamBackfill All Mysql Excluded Objects Mysql Database] MySQL databases on the server Structure is documented below.
- mysql
Databases List<Property Map> MySQL databases on the server Structure is documented below.
StreamBackfillAllMysqlExcludedObjectsMysqlDatabase, StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseArgs
- Database string
Database name.
- Mysql
Tables List<StreamBackfill All Mysql Excluded Objects Mysql Database Mysql Table> Tables in the database. Structure is documented below.
- Database string
Database name.
- Mysql
Tables []StreamBackfill All Mysql Excluded Objects Mysql Database Mysql Table Tables in the database. Structure is documented below.
- database String
Database name.
- mysql
Tables List<StreamBackfill All Mysql Excluded Objects Mysql Database Mysql Table> Tables in the database. Structure is documented below.
- database string
Database name.
- mysql
Tables StreamBackfill All Mysql Excluded Objects Mysql Database Mysql Table[] Tables in the database. Structure is documented below.
- database str
Database name.
- mysql_
tables Sequence[StreamBackfill All Mysql Excluded Objects Mysql Database Mysql Table] Tables in the database. Structure is documented below.
- database String
Database name.
- mysql
Tables List<Property Map> Tables in the database. Structure is documented below.
StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTable, StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableArgs
- Table string
Table name.
- Mysql
Columns List<StreamBackfill All Mysql Excluded Objects Mysql Database Mysql Table Mysql Column> MySQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- Table string
Table name.
- Mysql
Columns []StreamBackfill All Mysql Excluded Objects Mysql Database Mysql Table Mysql Column MySQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table String
Table name.
- mysql
Columns List<StreamBackfill All Mysql Excluded Objects Mysql Database Mysql Table Mysql Column> MySQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table string
Table name.
- mysql
Columns StreamBackfill All Mysql Excluded Objects Mysql Database Mysql Table Mysql Column[] MySQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table str
Table name.
- mysql_
columns Sequence[StreamBackfill All Mysql Excluded Objects Mysql Database Mysql Table Mysql Column] MySQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table String
Table name.
- mysql
Columns List<Property Map> MySQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableMysqlColumn, StreamBackfillAllMysqlExcludedObjectsMysqlDatabaseMysqlTableMysqlColumnArgs
- Collation string
Column collation.
- Column string
Column name.
- Data
Type string The MySQL data type. Full data types list can be found here: https://dev.mysql.com/doc/refman/8.0/en/data-types.html
- Length int
(Output) Column length.
- Nullable bool
Whether or not the column can accept a null value.
- Ordinal
Position int The ordinal position of the column in the table.
- Primary
Key bool Whether or not the column represents a primary key.
- Collation string
Column collation.
- Column string
Column name.
- Data
Type string The MySQL data type. Full data types list can be found here: https://dev.mysql.com/doc/refman/8.0/en/data-types.html
- Length int
(Output) Column length.
- Nullable bool
Whether or not the column can accept a null value.
- Ordinal
Position int The ordinal position of the column in the table.
- Primary
Key bool Whether or not the column represents a primary key.
- collation String
Column collation.
- column String
Column name.
- data
Type String The MySQL data type. Full data types list can be found here: https://dev.mysql.com/doc/refman/8.0/en/data-types.html
- length Integer
(Output) Column length.
- nullable Boolean
Whether or not the column can accept a null value.
- ordinal
Position Integer The ordinal position of the column in the table.
- primary
Key Boolean Whether or not the column represents a primary key.
- collation string
Column collation.
- column string
Column name.
- data
Type string The MySQL data type. Full data types list can be found here: https://dev.mysql.com/doc/refman/8.0/en/data-types.html
- length number
(Output) Column length.
- nullable boolean
Whether or not the column can accept a null value.
- ordinal
Position number The ordinal position of the column in the table.
- primary
Key boolean Whether or not the column represents a primary key.
- collation str
Column collation.
- column str
Column name.
- data_
type str The MySQL data type. Full data types list can be found here: https://dev.mysql.com/doc/refman/8.0/en/data-types.html
- length int
(Output) Column length.
- nullable bool
Whether or not the column can accept a null value.
- ordinal_
position int The ordinal position of the column in the table.
- primary_
key bool Whether or not the column represents a primary key.
- collation String
Column collation.
- column String
Column name.
- data
Type String The MySQL data type. Full data types list can be found here: https://dev.mysql.com/doc/refman/8.0/en/data-types.html
- length Number
(Output) Column length.
- nullable Boolean
Whether or not the column can accept a null value.
- ordinal
Position Number The ordinal position of the column in the table.
- primary
Key Boolean Whether or not the column represents a primary key.
StreamBackfillAllOracleExcludedObjects, StreamBackfillAllOracleExcludedObjectsArgs
- Oracle
Schemas List<StreamBackfill All Oracle Excluded Objects Oracle Schema> Oracle schemas/databases in the database server Structure is documented below.
- Oracle
Schemas []StreamBackfill All Oracle Excluded Objects Oracle Schema Oracle schemas/databases in the database server Structure is documented below.
- oracle
Schemas List<StreamBackfill All Oracle Excluded Objects Oracle Schema> Oracle schemas/databases in the database server Structure is documented below.
- oracle
Schemas StreamBackfill All Oracle Excluded Objects Oracle Schema[] Oracle schemas/databases in the database server Structure is documented below.
- oracle_
schemas Sequence[StreamBackfill All Oracle Excluded Objects Oracle Schema] Oracle schemas/databases in the database server Structure is documented below.
- oracle
Schemas List<Property Map> Oracle schemas/databases in the database server Structure is documented below.
StreamBackfillAllOracleExcludedObjectsOracleSchema, StreamBackfillAllOracleExcludedObjectsOracleSchemaArgs
- Schema string
Schema name.
- Oracle
Tables List<StreamBackfill All Oracle Excluded Objects Oracle Schema Oracle Table> Tables in the database. Structure is documented below.
- Schema string
Schema name.
- Oracle
Tables []StreamBackfill All Oracle Excluded Objects Oracle Schema Oracle Table Tables in the database. Structure is documented below.
- schema String
Schema name.
- oracle
Tables List<StreamBackfill All Oracle Excluded Objects Oracle Schema Oracle Table> Tables in the database. Structure is documented below.
- schema string
Schema name.
- oracle
Tables StreamBackfill All Oracle Excluded Objects Oracle Schema Oracle Table[] Tables in the database. Structure is documented below.
- schema str
Schema name.
- oracle_
tables Sequence[StreamBackfill All Oracle Excluded Objects Oracle Schema Oracle Table] Tables in the database. Structure is documented below.
- schema String
Schema name.
- oracle
Tables List<Property Map> Tables in the database. Structure is documented below.
StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTable, StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableArgs
- Table string
Table name.
- Oracle
Columns List<StreamBackfill All Oracle Excluded Objects Oracle Schema Oracle Table Oracle Column> Oracle columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- Table string
Table name.
- Oracle
Columns []StreamBackfill All Oracle Excluded Objects Oracle Schema Oracle Table Oracle Column Oracle columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table String
Table name.
- oracle
Columns List<StreamBackfill All Oracle Excluded Objects Oracle Schema Oracle Table Oracle Column> Oracle columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table string
Table name.
- oracle
Columns StreamBackfill All Oracle Excluded Objects Oracle Schema Oracle Table Oracle Column[] Oracle columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table str
Table name.
- oracle_
columns Sequence[StreamBackfill All Oracle Excluded Objects Oracle Schema Oracle Table Oracle Column] Oracle columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table String
Table name.
- oracle
Columns List<Property Map> Oracle columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableOracleColumn, StreamBackfillAllOracleExcludedObjectsOracleSchemaOracleTableOracleColumnArgs
- Column string
Column name.
- Data
Type string The Oracle data type. Full data types list can be found here: https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf/Data-Types.html
- Encoding string
(Output) Column encoding.
- Length int
(Output) Column length.
- Nullable bool
(Output) Whether or not the column can accept a null value.
- Ordinal
Position int (Output) The ordinal position of the column in the table.
- Precision int
(Output) Column precision.
- Primary
Key bool (Output) Whether or not the column represents a primary key.
- Scale int
(Output) Column scale.
- Column string
Column name.
- Data
Type string The Oracle data type. Full data types list can be found here: https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf/Data-Types.html
- Encoding string
(Output) Column encoding.
- Length int
(Output) Column length.
- Nullable bool
(Output) Whether or not the column can accept a null value.
- Ordinal
Position int (Output) The ordinal position of the column in the table.
- Precision int
(Output) Column precision.
- Primary
Key bool (Output) Whether or not the column represents a primary key.
- Scale int
(Output) Column scale.
- column String
Column name.
- data
Type String The Oracle data type. Full data types list can be found here: https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf/Data-Types.html
- encoding String
(Output) Column encoding.
- length Integer
(Output) Column length.
- nullable Boolean
(Output) Whether or not the column can accept a null value.
- ordinal
Position Integer (Output) The ordinal position of the column in the table.
- precision Integer
(Output) Column precision.
- primary
Key Boolean (Output) Whether or not the column represents a primary key.
- scale Integer
(Output) Column scale.
- column string
Column name.
- data
Type string The Oracle data type. Full data types list can be found here: https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf/Data-Types.html
- encoding string
(Output) Column encoding.
- length number
(Output) Column length.
- nullable boolean
(Output) Whether or not the column can accept a null value.
- ordinal
Position number (Output) The ordinal position of the column in the table.
- precision number
(Output) Column precision.
- primary
Key boolean (Output) Whether or not the column represents a primary key.
- scale number
(Output) Column scale.
- column str
Column name.
- data_
type str The Oracle data type. Full data types list can be found here: https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf/Data-Types.html
- encoding str
(Output) Column encoding.
- length int
(Output) Column length.
- nullable bool
(Output) Whether or not the column can accept a null value.
- ordinal_
position int (Output) The ordinal position of the column in the table.
- precision int
(Output) Column precision.
- primary_
key bool (Output) Whether or not the column represents a primary key.
- scale int
(Output) Column scale.
- column String
Column name.
- data
Type String The Oracle data type. Full data types list can be found here: https://docs.oracle.com/en/database/oracle/oracle-database/21/sqlrf/Data-Types.html
- encoding String
(Output) Column encoding.
- length Number
(Output) Column length.
- nullable Boolean
(Output) Whether or not the column can accept a null value.
- ordinal
Position Number (Output) The ordinal position of the column in the table.
- precision Number
(Output) Column precision.
- primary
Key Boolean (Output) Whether or not the column represents a primary key.
- scale Number
(Output) Column scale.
StreamBackfillAllPostgresqlExcludedObjects, StreamBackfillAllPostgresqlExcludedObjectsArgs
- Postgresql
Schemas List<StreamBackfill All Postgresql Excluded Objects Postgresql Schema> PostgreSQL schemas on the server Structure is documented below.
- Postgresql
Schemas []StreamBackfill All Postgresql Excluded Objects Postgresql Schema PostgreSQL schemas on the server Structure is documented below.
- postgresql
Schemas List<StreamBackfill All Postgresql Excluded Objects Postgresql Schema> PostgreSQL schemas on the server Structure is documented below.
- postgresql
Schemas StreamBackfill All Postgresql Excluded Objects Postgresql Schema[] PostgreSQL schemas on the server Structure is documented below.
- postgresql_
schemas Sequence[StreamBackfill All Postgresql Excluded Objects Postgresql Schema] PostgreSQL schemas on the server Structure is documented below.
- postgresql
Schemas List<Property Map> PostgreSQL schemas on the server Structure is documented below.
StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchema, StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaArgs
- Schema string
Database name.
- Postgresql
Tables List<StreamBackfill All Postgresql Excluded Objects Postgresql Schema Postgresql Table> Tables in the schema. Structure is documented below.
- Schema string
Database name.
- Postgresql
Tables []StreamBackfill All Postgresql Excluded Objects Postgresql Schema Postgresql Table Tables in the schema. Structure is documented below.
- schema String
Database name.
- postgresql
Tables List<StreamBackfill All Postgresql Excluded Objects Postgresql Schema Postgresql Table> Tables in the schema. Structure is documented below.
- schema string
Database name.
- postgresql
Tables StreamBackfill All Postgresql Excluded Objects Postgresql Schema Postgresql Table[] Tables in the schema. Structure is documented below.
- schema str
Database name.
- postgresql_
tables Sequence[StreamBackfill All Postgresql Excluded Objects Postgresql Schema Postgresql Table] Tables in the schema. Structure is documented below.
- schema String
Database name.
- postgresql
Tables List<Property Map> Tables in the schema. Structure is documented below.
StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTable, StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTableArgs
- Table string
Table name.
- Postgresql
Columns List<StreamBackfill All Postgresql Excluded Objects Postgresql Schema Postgresql Table Postgresql Column> PostgreSQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- Table string
Table name.
- Postgresql
Columns []StreamBackfill All Postgresql Excluded Objects Postgresql Schema Postgresql Table Postgresql Column PostgreSQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table String
Table name.
- postgresql
Columns List<StreamBackfill All Postgresql Excluded Objects Postgresql Schema Postgresql Table Postgresql Column> PostgreSQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table string
Table name.
- postgresql
Columns StreamBackfill All Postgresql Excluded Objects Postgresql Schema Postgresql Table Postgresql Column[] PostgreSQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table str
Table name.
- postgresql_
columns Sequence[StreamBackfill All Postgresql Excluded Objects Postgresql Schema Postgresql Table Postgresql Column] PostgreSQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table String
Table name.
- postgresql
Columns List<Property Map> PostgreSQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumn, StreamBackfillAllPostgresqlExcludedObjectsPostgresqlSchemaPostgresqlTablePostgresqlColumnArgs
- Column string
Column name.
- Data
Type string The PostgreSQL data type. Full data types list can be found here: https://www.postgresql.org/docs/current/datatype.html
- Length int
(Output) Column length.
- Nullable bool
Whether or not the column can accept a null value.
- Ordinal
Position int The ordinal position of the column in the table.
- Precision int
(Output) Column precision.
- Primary
Key bool Whether or not the column represents a primary key.
- Scale int
(Output) Column scale.
- Column string
Column name.
- Data
Type string The PostgreSQL data type. Full data types list can be found here: https://www.postgresql.org/docs/current/datatype.html
- Length int
(Output) Column length.
- Nullable bool
Whether or not the column can accept a null value.
- Ordinal
Position int The ordinal position of the column in the table.
- Precision int
(Output) Column precision.
- Primary
Key bool Whether or not the column represents a primary key.
- Scale int
(Output) Column scale.
- column String
Column name.
- data
Type String The PostgreSQL data type. Full data types list can be found here: https://www.postgresql.org/docs/current/datatype.html
- length Integer
(Output) Column length.
- nullable Boolean
Whether or not the column can accept a null value.
- ordinal
Position Integer The ordinal position of the column in the table.
- precision Integer
(Output) Column precision.
- primary
Key Boolean Whether or not the column represents a primary key.
- scale Integer
(Output) Column scale.
- column string
Column name.
- data
Type string The PostgreSQL data type. Full data types list can be found here: https://www.postgresql.org/docs/current/datatype.html
- length number
(Output) Column length.
- nullable boolean
Whether or not the column can accept a null value.
- ordinal
Position number The ordinal position of the column in the table.
- precision number
(Output) Column precision.
- primary
Key boolean Whether or not the column represents a primary key.
- scale number
(Output) Column scale.
- column str
Column name.
- data_
type str The PostgreSQL data type. Full data types list can be found here: https://www.postgresql.org/docs/current/datatype.html
- length int
(Output) Column length.
- nullable bool
Whether or not the column can accept a null value.
- ordinal_
position int The ordinal position of the column in the table.
- precision int
(Output) Column precision.
- primary_
key bool Whether or not the column represents a primary key.
- scale int
(Output) Column scale.
- column String
Column name.
- data
Type String The PostgreSQL data type. Full data types list can be found here: https://www.postgresql.org/docs/current/datatype.html
- length Number
(Output) Column length.
- nullable Boolean
Whether or not the column can accept a null value.
- ordinal
Position Number The ordinal position of the column in the table.
- precision Number
(Output) Column precision.
- primary
Key Boolean Whether or not the column represents a primary key.
- scale Number
(Output) Column scale.
StreamDestinationConfig, StreamDestinationConfigArgs
- Destination
Connection stringProfile Destination connection profile resource. Format: projects/{project}/locations/{location}/connectionProfiles/{name}
- Bigquery
Destination StreamConfig Destination Config Bigquery Destination Config A configuration for how data should be loaded to BigQuery. Structure is documented below.
- Gcs
Destination StreamConfig Destination Config Gcs Destination Config A configuration for how data should be loaded to Cloud Storage. Structure is documented below.
- Destination
Connection stringProfile Destination connection profile resource. Format: projects/{project}/locations/{location}/connectionProfiles/{name}
- Bigquery
Destination StreamConfig Destination Config Bigquery Destination Config A configuration for how data should be loaded to BigQuery. Structure is documented below.
- Gcs
Destination StreamConfig Destination Config Gcs Destination Config A configuration for how data should be loaded to Cloud Storage. Structure is documented below.
- destination
Connection StringProfile Destination connection profile resource. Format: projects/{project}/locations/{location}/connectionProfiles/{name}
- bigquery
Destination StreamConfig Destination Config Bigquery Destination Config A configuration for how data should be loaded to BigQuery. Structure is documented below.
- gcs
Destination StreamConfig Destination Config Gcs Destination Config A configuration for how data should be loaded to Cloud Storage. Structure is documented below.
- destination
Connection stringProfile Destination connection profile resource. Format: projects/{project}/locations/{location}/connectionProfiles/{name}
- bigquery
Destination StreamConfig Destination Config Bigquery Destination Config A configuration for how data should be loaded to BigQuery. Structure is documented below.
- gcs
Destination StreamConfig Destination Config Gcs Destination Config A configuration for how data should be loaded to Cloud Storage. Structure is documented below.
- destination_
connection_ strprofile Destination connection profile resource. Format: projects/{project}/locations/{location}/connectionProfiles/{name}
- bigquery_
destination_ Streamconfig Destination Config Bigquery Destination Config A configuration for how data should be loaded to BigQuery. Structure is documented below.
- gcs_
destination_ Streamconfig Destination Config Gcs Destination Config A configuration for how data should be loaded to Cloud Storage. Structure is documented below.
- destination
Connection StringProfile Destination connection profile resource. Format: projects/{project}/locations/{location}/connectionProfiles/{name}
- bigquery
Destination Property MapConfig A configuration for how data should be loaded to BigQuery. Structure is documented below.
- gcs
Destination Property MapConfig A configuration for how data should be loaded to Cloud Storage. Structure is documented below.
StreamDestinationConfigBigqueryDestinationConfig, StreamDestinationConfigBigqueryDestinationConfigArgs
- Data
Freshness string The guaranteed data freshness (in seconds) when querying tables created by the stream. Editing this field will only affect new tables created in the future, but existing tables will not be impacted. Lower values mean that queries will return fresher data, but may result in higher cost. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". Defaults to 900s.
- Single
Target StreamDataset Destination Config Bigquery Destination Config Single Target Dataset A single target dataset to which all data will be streamed. Structure is documented below.
- Source
Hierarchy StreamDatasets Destination Config Bigquery Destination Config Source Hierarchy Datasets Destination datasets are created so that hierarchy of the destination data objects matches the source hierarchy. Structure is documented below.
- Data
Freshness string The guaranteed data freshness (in seconds) when querying tables created by the stream. Editing this field will only affect new tables created in the future, but existing tables will not be impacted. Lower values mean that queries will return fresher data, but may result in higher cost. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". Defaults to 900s.
- Single
Target StreamDataset Destination Config Bigquery Destination Config Single Target Dataset A single target dataset to which all data will be streamed. Structure is documented below.
- Source
Hierarchy StreamDatasets Destination Config Bigquery Destination Config Source Hierarchy Datasets Destination datasets are created so that hierarchy of the destination data objects matches the source hierarchy. Structure is documented below.
- data
Freshness String The guaranteed data freshness (in seconds) when querying tables created by the stream. Editing this field will only affect new tables created in the future, but existing tables will not be impacted. Lower values mean that queries will return fresher data, but may result in higher cost. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". Defaults to 900s.
- single
Target StreamDataset Destination Config Bigquery Destination Config Single Target Dataset A single target dataset to which all data will be streamed. Structure is documented below.
- source
Hierarchy StreamDatasets Destination Config Bigquery Destination Config Source Hierarchy Datasets Destination datasets are created so that hierarchy of the destination data objects matches the source hierarchy. Structure is documented below.
- data
Freshness string The guaranteed data freshness (in seconds) when querying tables created by the stream. Editing this field will only affect new tables created in the future, but existing tables will not be impacted. Lower values mean that queries will return fresher data, but may result in higher cost. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". Defaults to 900s.
- single
Target StreamDataset Destination Config Bigquery Destination Config Single Target Dataset A single target dataset to which all data will be streamed. Structure is documented below.
- source
Hierarchy StreamDatasets Destination Config Bigquery Destination Config Source Hierarchy Datasets Destination datasets are created so that hierarchy of the destination data objects matches the source hierarchy. Structure is documented below.
- data_
freshness str The guaranteed data freshness (in seconds) when querying tables created by the stream. Editing this field will only affect new tables created in the future, but existing tables will not be impacted. Lower values mean that queries will return fresher data, but may result in higher cost. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". Defaults to 900s.
- single_
target_ Streamdataset Destination Config Bigquery Destination Config Single Target Dataset A single target dataset to which all data will be streamed. Structure is documented below.
- source_
hierarchy_ Streamdatasets Destination Config Bigquery Destination Config Source Hierarchy Datasets Destination datasets are created so that hierarchy of the destination data objects matches the source hierarchy. Structure is documented below.
- data
Freshness String The guaranteed data freshness (in seconds) when querying tables created by the stream. Editing this field will only affect new tables created in the future, but existing tables will not be impacted. Lower values mean that queries will return fresher data, but may result in higher cost. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". Defaults to 900s.
- single
Target Property MapDataset A single target dataset to which all data will be streamed. Structure is documented below.
- source
Hierarchy Property MapDatasets Destination datasets are created so that hierarchy of the destination data objects matches the source hierarchy. Structure is documented below.
StreamDestinationConfigBigqueryDestinationConfigSingleTargetDataset, StreamDestinationConfigBigqueryDestinationConfigSingleTargetDatasetArgs
- Dataset
Id string Dataset ID in the format projects/{project}/datasets/{dataset_id} or {project}:{dataset_id}
- Dataset
Id string Dataset ID in the format projects/{project}/datasets/{dataset_id} or {project}:{dataset_id}
- dataset
Id String Dataset ID in the format projects/{project}/datasets/{dataset_id} or {project}:{dataset_id}
- dataset
Id string Dataset ID in the format projects/{project}/datasets/{dataset_id} or {project}:{dataset_id}
- dataset_
id str Dataset ID in the format projects/{project}/datasets/{dataset_id} or {project}:{dataset_id}
- dataset
Id String Dataset ID in the format projects/{project}/datasets/{dataset_id} or {project}:{dataset_id}
StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasets, StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsArgs
- Dataset
Template StreamDestination Config Bigquery Destination Config Source Hierarchy Datasets Dataset Template Dataset template used for dynamic dataset creation. Structure is documented below.
- Dataset
Template StreamDestination Config Bigquery Destination Config Source Hierarchy Datasets Dataset Template Dataset template used for dynamic dataset creation. Structure is documented below.
- dataset
Template StreamDestination Config Bigquery Destination Config Source Hierarchy Datasets Dataset Template Dataset template used for dynamic dataset creation. Structure is documented below.
- dataset
Template StreamDestination Config Bigquery Destination Config Source Hierarchy Datasets Dataset Template Dataset template used for dynamic dataset creation. Structure is documented below.
- dataset_
template StreamDestination Config Bigquery Destination Config Source Hierarchy Datasets Dataset Template Dataset template used for dynamic dataset creation. Structure is documented below.
- dataset
Template Property Map Dataset template used for dynamic dataset creation. Structure is documented below.
StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplate, StreamDestinationConfigBigqueryDestinationConfigSourceHierarchyDatasetsDatasetTemplateArgs
- Location string
The geographic location where the dataset should reside. See https://cloud.google.com/bigquery/docs/locations for supported locations.
- Dataset
Id stringPrefix If supplied, every created dataset will have its name prefixed by the provided value. The prefix and name will be separated by an underscore. i.e. _.
- Kms
Key stringName Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table. The BigQuery Service Account associated with your project requires access to this encryption key. i.e. projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{cryptoKey}. See https://cloud.google.com/bigquery/docs/customer-managed-encryption for more information.
- Location string
The geographic location where the dataset should reside. See https://cloud.google.com/bigquery/docs/locations for supported locations.
- Dataset
Id stringPrefix If supplied, every created dataset will have its name prefixed by the provided value. The prefix and name will be separated by an underscore. i.e. _.
- Kms
Key stringName Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table. The BigQuery Service Account associated with your project requires access to this encryption key. i.e. projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{cryptoKey}. See https://cloud.google.com/bigquery/docs/customer-managed-encryption for more information.
- location String
The geographic location where the dataset should reside. See https://cloud.google.com/bigquery/docs/locations for supported locations.
- dataset
Id StringPrefix If supplied, every created dataset will have its name prefixed by the provided value. The prefix and name will be separated by an underscore. i.e. _.
- kms
Key StringName Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table. The BigQuery Service Account associated with your project requires access to this encryption key. i.e. projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{cryptoKey}. See https://cloud.google.com/bigquery/docs/customer-managed-encryption for more information.
- location string
The geographic location where the dataset should reside. See https://cloud.google.com/bigquery/docs/locations for supported locations.
- dataset
Id stringPrefix If supplied, every created dataset will have its name prefixed by the provided value. The prefix and name will be separated by an underscore. i.e. _.
- kms
Key stringName Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table. The BigQuery Service Account associated with your project requires access to this encryption key. i.e. projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{cryptoKey}. See https://cloud.google.com/bigquery/docs/customer-managed-encryption for more information.
- location str
The geographic location where the dataset should reside. See https://cloud.google.com/bigquery/docs/locations for supported locations.
- dataset_
id_ strprefix If supplied, every created dataset will have its name prefixed by the provided value. The prefix and name will be separated by an underscore. i.e. _.
- kms_
key_ strname Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table. The BigQuery Service Account associated with your project requires access to this encryption key. i.e. projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{cryptoKey}. See https://cloud.google.com/bigquery/docs/customer-managed-encryption for more information.
- location String
The geographic location where the dataset should reside. See https://cloud.google.com/bigquery/docs/locations for supported locations.
- dataset
Id StringPrefix If supplied, every created dataset will have its name prefixed by the provided value. The prefix and name will be separated by an underscore. i.e. _.
- kms
Key StringName Describes the Cloud KMS encryption key that will be used to protect destination BigQuery table. The BigQuery Service Account associated with your project requires access to this encryption key. i.e. projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{cryptoKey}. See https://cloud.google.com/bigquery/docs/customer-managed-encryption for more information.
StreamDestinationConfigGcsDestinationConfig, StreamDestinationConfigGcsDestinationConfigArgs
- Avro
File StreamFormat Destination Config Gcs Destination Config Avro File Format AVRO file format configuration.
- File
Rotation stringInterval The maximum duration for which new events are added before a file is closed and a new file is created. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". Defaults to 900s.
- File
Rotation intMb The maximum file size to be saved in the bucket.
- Json
File StreamFormat Destination Config Gcs Destination Config Json File Format JSON file format configuration. Structure is documented below.
- Path string
Path inside the Cloud Storage bucket to write data to.
- Avro
File StreamFormat Destination Config Gcs Destination Config Avro File Format AVRO file format configuration.
- File
Rotation stringInterval The maximum duration for which new events are added before a file is closed and a new file is created. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". Defaults to 900s.
- File
Rotation intMb The maximum file size to be saved in the bucket.
- Json
File StreamFormat Destination Config Gcs Destination Config Json File Format JSON file format configuration. Structure is documented below.
- Path string
Path inside the Cloud Storage bucket to write data to.
- avro
File StreamFormat Destination Config Gcs Destination Config Avro File Format AVRO file format configuration.
- file
Rotation StringInterval The maximum duration for which new events are added before a file is closed and a new file is created. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". Defaults to 900s.
- file
Rotation IntegerMb The maximum file size to be saved in the bucket.
- json
File StreamFormat Destination Config Gcs Destination Config Json File Format JSON file format configuration. Structure is documented below.
- path String
Path inside the Cloud Storage bucket to write data to.
- avro
File StreamFormat Destination Config Gcs Destination Config Avro File Format AVRO file format configuration.
- file
Rotation stringInterval The maximum duration for which new events are added before a file is closed and a new file is created. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". Defaults to 900s.
- file
Rotation numberMb The maximum file size to be saved in the bucket.
- json
File StreamFormat Destination Config Gcs Destination Config Json File Format JSON file format configuration. Structure is documented below.
- path string
Path inside the Cloud Storage bucket to write data to.
- avro_
file_ Streamformat Destination Config Gcs Destination Config Avro File Format AVRO file format configuration.
- file_
rotation_ strinterval The maximum duration for which new events are added before a file is closed and a new file is created. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". Defaults to 900s.
- file_
rotation_ intmb The maximum file size to be saved in the bucket.
- json_
file_ Streamformat Destination Config Gcs Destination Config Json File Format JSON file format configuration. Structure is documented below.
- path str
Path inside the Cloud Storage bucket to write data to.
- avro
File Property MapFormat AVRO file format configuration.
- file
Rotation StringInterval The maximum duration for which new events are added before a file is closed and a new file is created. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s". Defaults to 900s.
- file
Rotation NumberMb The maximum file size to be saved in the bucket.
- json
File Property MapFormat JSON file format configuration. Structure is documented below.
- path String
Path inside the Cloud Storage bucket to write data to.
StreamDestinationConfigGcsDestinationConfigJsonFileFormat, StreamDestinationConfigGcsDestinationConfigJsonFileFormatArgs
- Compression string
Compression of the loaded JSON file. Possible values are:
NO_COMPRESSION
,GZIP
.- Schema
File stringFormat The schema file format along JSON data files. Possible values are:
NO_SCHEMA_FILE
,AVRO_SCHEMA_FILE
.
- Compression string
Compression of the loaded JSON file. Possible values are:
NO_COMPRESSION
,GZIP
.- Schema
File stringFormat The schema file format along JSON data files. Possible values are:
NO_SCHEMA_FILE
,AVRO_SCHEMA_FILE
.
- compression String
Compression of the loaded JSON file. Possible values are:
NO_COMPRESSION
,GZIP
.- schema
File StringFormat The schema file format along JSON data files. Possible values are:
NO_SCHEMA_FILE
,AVRO_SCHEMA_FILE
.
- compression string
Compression of the loaded JSON file. Possible values are:
NO_COMPRESSION
,GZIP
.- schema
File stringFormat The schema file format along JSON data files. Possible values are:
NO_SCHEMA_FILE
,AVRO_SCHEMA_FILE
.
- compression str
Compression of the loaded JSON file. Possible values are:
NO_COMPRESSION
,GZIP
.- schema_
file_ strformat The schema file format along JSON data files. Possible values are:
NO_SCHEMA_FILE
,AVRO_SCHEMA_FILE
.
- compression String
Compression of the loaded JSON file. Possible values are:
NO_COMPRESSION
,GZIP
.- schema
File StringFormat The schema file format along JSON data files. Possible values are:
NO_SCHEMA_FILE
,AVRO_SCHEMA_FILE
.
StreamSourceConfig, StreamSourceConfigArgs
- Source
Connection stringProfile Source connection profile resource. Format: projects/{project}/locations/{location}/connectionProfiles/{name}
- Mysql
Source StreamConfig Source Config Mysql Source Config MySQL data source configuration. Structure is documented below.
- Oracle
Source StreamConfig Source Config Oracle Source Config Oracle data source configuration. Structure is documented below.
- Postgresql
Source StreamConfig Source Config Postgresql Source Config PostgreSQL data source configuration. Structure is documented below.
- Source
Connection stringProfile Source connection profile resource. Format: projects/{project}/locations/{location}/connectionProfiles/{name}
- Mysql
Source StreamConfig Source Config Mysql Source Config MySQL data source configuration. Structure is documented below.
- Oracle
Source StreamConfig Source Config Oracle Source Config Oracle data source configuration. Structure is documented below.
- Postgresql
Source StreamConfig Source Config Postgresql Source Config PostgreSQL data source configuration. Structure is documented below.
- source
Connection StringProfile Source connection profile resource. Format: projects/{project}/locations/{location}/connectionProfiles/{name}
- mysql
Source StreamConfig Source Config Mysql Source Config MySQL data source configuration. Structure is documented below.
- oracle
Source StreamConfig Source Config Oracle Source Config Oracle data source configuration. Structure is documented below.
- postgresql
Source StreamConfig Source Config Postgresql Source Config PostgreSQL data source configuration. Structure is documented below.
- source
Connection stringProfile Source connection profile resource. Format: projects/{project}/locations/{location}/connectionProfiles/{name}
- mysql
Source StreamConfig Source Config Mysql Source Config MySQL data source configuration. Structure is documented below.
- oracle
Source StreamConfig Source Config Oracle Source Config Oracle data source configuration. Structure is documented below.
- postgresql
Source StreamConfig Source Config Postgresql Source Config PostgreSQL data source configuration. Structure is documented below.
- source_
connection_ strprofile Source connection profile resource. Format: projects/{project}/locations/{location}/connectionProfiles/{name}
- mysql_
source_ Streamconfig Source Config Mysql Source Config MySQL data source configuration. Structure is documented below.
- oracle_
source_ Streamconfig Source Config Oracle Source Config Oracle data source configuration. Structure is documented below.
- postgresql_
source_ Streamconfig Source Config Postgresql Source Config PostgreSQL data source configuration. Structure is documented below.
- source
Connection StringProfile Source connection profile resource. Format: projects/{project}/locations/{location}/connectionProfiles/{name}
- mysql
Source Property MapConfig MySQL data source configuration. Structure is documented below.
- oracle
Source Property MapConfig Oracle data source configuration. Structure is documented below.
- postgresql
Source Property MapConfig PostgreSQL data source configuration. Structure is documented below.
StreamSourceConfigMysqlSourceConfig, StreamSourceConfigMysqlSourceConfigArgs
- Exclude
Objects StreamSource Config Mysql Source Config Exclude Objects MySQL objects to exclude from the stream. Structure is documented below.
- Include
Objects StreamSource Config Mysql Source Config Include Objects MySQL objects to retrieve from the source. Structure is documented below.
- Max
Concurrent intBackfill Tasks Maximum number of concurrent backfill tasks. The number should be non negative. If not set (or set to 0), the system's default value will be used.
- Max
Concurrent intCdc Tasks Maximum number of concurrent CDC tasks. The number should be non negative. If not set (or set to 0), the system's default value will be used.
- Exclude
Objects StreamSource Config Mysql Source Config Exclude Objects MySQL objects to exclude from the stream. Structure is documented below.
- Include
Objects StreamSource Config Mysql Source Config Include Objects MySQL objects to retrieve from the source. Structure is documented below.
- Max
Concurrent intBackfill Tasks Maximum number of concurrent backfill tasks. The number should be non negative. If not set (or set to 0), the system's default value will be used.
- Max
Concurrent intCdc Tasks Maximum number of concurrent CDC tasks. The number should be non negative. If not set (or set to 0), the system's default value will be used.
- exclude
Objects StreamSource Config Mysql Source Config Exclude Objects MySQL objects to exclude from the stream. Structure is documented below.
- include
Objects StreamSource Config Mysql Source Config Include Objects MySQL objects to retrieve from the source. Structure is documented below.
- max
Concurrent IntegerBackfill Tasks Maximum number of concurrent backfill tasks. The number should be non negative. If not set (or set to 0), the system's default value will be used.
- max
Concurrent IntegerCdc Tasks Maximum number of concurrent CDC tasks. The number should be non negative. If not set (or set to 0), the system's default value will be used.
- exclude
Objects StreamSource Config Mysql Source Config Exclude Objects MySQL objects to exclude from the stream. Structure is documented below.
- include
Objects StreamSource Config Mysql Source Config Include Objects MySQL objects to retrieve from the source. Structure is documented below.
- max
Concurrent numberBackfill Tasks Maximum number of concurrent backfill tasks. The number should be non negative. If not set (or set to 0), the system's default value will be used.
- max
Concurrent numberCdc Tasks Maximum number of concurrent CDC tasks. The number should be non negative. If not set (or set to 0), the system's default value will be used.
- exclude_
objects StreamSource Config Mysql Source Config Exclude Objects MySQL objects to exclude from the stream. Structure is documented below.
- include_
objects StreamSource Config Mysql Source Config Include Objects MySQL objects to retrieve from the source. Structure is documented below.
- max_
concurrent_ intbackfill_ tasks Maximum number of concurrent backfill tasks. The number should be non negative. If not set (or set to 0), the system's default value will be used.
- max_
concurrent_ intcdc_ tasks Maximum number of concurrent CDC tasks. The number should be non negative. If not set (or set to 0), the system's default value will be used.
- exclude
Objects Property Map MySQL objects to exclude from the stream. Structure is documented below.
- include
Objects Property Map MySQL objects to retrieve from the source. Structure is documented below.
- max
Concurrent NumberBackfill Tasks Maximum number of concurrent backfill tasks. The number should be non negative. If not set (or set to 0), the system's default value will be used.
- max
Concurrent NumberCdc Tasks Maximum number of concurrent CDC tasks. The number should be non negative. If not set (or set to 0), the system's default value will be used.
StreamSourceConfigMysqlSourceConfigExcludeObjects, StreamSourceConfigMysqlSourceConfigExcludeObjectsArgs
- Mysql
Databases List<StreamSource Config Mysql Source Config Exclude Objects Mysql Database> MySQL databases on the server. Structure is documented below.
- Mysql
Databases []StreamSource Config Mysql Source Config Exclude Objects Mysql Database MySQL databases on the server. Structure is documented below.
- mysql
Databases List<StreamSource Config Mysql Source Config Exclude Objects Mysql Database> MySQL databases on the server. Structure is documented below.
- mysql
Databases StreamSource Config Mysql Source Config Exclude Objects Mysql Database[] MySQL databases on the server. Structure is documented below.
- mysql_
databases Sequence[StreamSource Config Mysql Source Config Exclude Objects Mysql Database] MySQL databases on the server. Structure is documented below.
- mysql
Databases List<Property Map> MySQL databases on the server. Structure is documented below.
StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabase, StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseArgs
- Database string
Database name.
- Mysql
Tables List<StreamSource Config Mysql Source Config Exclude Objects Mysql Database Mysql Table> Tables in the database. Structure is documented below.
- Database string
Database name.
- Mysql
Tables []StreamSource Config Mysql Source Config Exclude Objects Mysql Database Mysql Table Tables in the database. Structure is documented below.
- database String
Database name.
- mysql
Tables List<StreamSource Config Mysql Source Config Exclude Objects Mysql Database Mysql Table> Tables in the database. Structure is documented below.
- database string
Database name.
- mysql
Tables StreamSource Config Mysql Source Config Exclude Objects Mysql Database Mysql Table[] Tables in the database. Structure is documented below.
- database str
Database name.
- mysql_
tables Sequence[StreamSource Config Mysql Source Config Exclude Objects Mysql Database Mysql Table] Tables in the database. Structure is documented below.
- database String
Database name.
- mysql
Tables List<Property Map> Tables in the database. Structure is documented below.
StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTable, StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableArgs
- Table string
Table name.
- Mysql
Columns List<StreamSource Config Mysql Source Config Exclude Objects Mysql Database Mysql Table Mysql Column> MySQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- Table string
Table name.
- Mysql
Columns []StreamSource Config Mysql Source Config Exclude Objects Mysql Database Mysql Table Mysql Column MySQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table String
Table name.
- mysql
Columns List<StreamSource Config Mysql Source Config Exclude Objects Mysql Database Mysql Table Mysql Column> MySQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table string
Table name.
- mysql
Columns StreamSource Config Mysql Source Config Exclude Objects Mysql Database Mysql Table Mysql Column[] MySQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table str
Table name.
- mysql_
columns Sequence[StreamSource Config Mysql Source Config Exclude Objects Mysql Database Mysql Table Mysql Column] MySQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
- table String
Table name.
- mysql
Columns List<Property Map> MySQL columns in the schema. When unspecified as part of include/exclude objects, includes/excludes everything. Structure is documented below.
StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableMysqlColumn, StreamSourceConfigMysqlSourceConfigExcludeObjectsMysqlDatabaseMysqlTableMysqlColumnArgs
- Collation string
Column collation.
- Column string
Column name.
- Data
Type string The MySQL data type. Full data types list can be found here: https://dev.mysql.com/doc/refman/8.0/en/data-types.html
- Length int
(Output) Column length.
- Nullable bool
Whether or not the column can accept a null value.
- Ordinal
Position int The ordinal position of the column in the table.
- Primary
Key bool Whether or not the column represents a primary key.
- Collation string
Column collation.
- Column string
Column name.
- Data
Type string The MySQL data type. Full data types list can be found here: https://dev.mysql.com/doc/refman/8.0/en/data-types.html
- Length int
(Output) Column length.
- Nullable bool
Whether or not the column can accept a null value.
- Ordinal
Position int The ordinal position of the column in the table.
- Primary
Key bool Whether or not the column represents a primary key.
- collation String
Column collation.
- column String
Column name.
- data
Type String The MySQL data type. Full data types list can be found here: https://dev.mysql.com/doc/refman/8.0/en/data-types.html
- length Integer
(Output) Column length.
- nullable Boolean
Whether or not the column can accept a null value.
- ordinal
Position Integer The ordinal position of the column in the table.
- primary
Key Boolean Whether or not the column represents a primary key.
- collation string
Column collation.
- column string
Column name.
- data
Type string The MySQL data type. Full data types list can be found here: https://dev.mysql.com/doc/refman/8.0/en/data-types.html
- length number
(Output) Column length.
- nullable boolean
Whether or not the column can accept a null value.
- ordinal
Position number The ordinal position of the column in the table.
- primary
Key boolean Whether or not the column represents a primary key.
- collation str
Column collation.
- column str
Column name.
- data_type str
The MySQL data type. Full data types list can be found here: https://dev.mysql.com/doc/refman/8.0/en/data-types.html
- length int
(Output) Column length.
- nullable bool
Whether or not the column can accept a null value.
- ordinal_position int
The ordinal position of the column in the table.
- primary_key bool
Whether or not the column represents a primary key.
- collation String
Column collation.
- column String
Column name.
- dataType String
The MySQL data type. Full data types list can be found here: https://dev.mysql.com/doc/refman/8.0/en/data-types.html
- length Number
(Output) Column length.
- nullable Boolean
Whether or not the column can accept a null value.
- ordinalPosition Number
The ordinal position of the column in the table.
- primaryKey Boolean
Whether or not the column represents a primary key.
StreamSourceConfigMysqlSourceConfigIncludeObjects, StreamSourceConfigMysqlSourceConfigIncludeObjectsArgs
- MysqlDatabases List&lt;StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabase&gt;
MySQL databases on the server. Structure is documented below.
- MysqlDatabases []StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabase
MySQL databases on the server. Structure is documented below.
- mysqlDatabases List&lt;StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabase&gt;
MySQL databases on the server. Structure is documented below.
- mysqlDatabases StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabase[]
MySQL databases on the server. Structure is documented below.
- mysql_databases Sequence[StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabase]
MySQL databases on the server. Structure is documented below.
- mysqlDatabases List&lt;Property Map&gt;
MySQL databases on the server. Structure is documented below.
StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabase, StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseArgs
- Database string
Database name.
- MysqlTables List&lt;StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTable&gt;
Tables in the database. Structure is documented below.
- Database string
Database name.
- MysqlTables []StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTable
Tables in the database. Structure is documented below.
- database String
Database name.
- mysqlTables List&lt;StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTable&gt;
Tables in the database. Structure is documented below.
- database string
Database name.
- mysqlTables StreamSourceConfigMysqlSourceConfigIncludeObjectsMysqlDatabaseMysqlTable[]
Tables in the database. Structure is documented below.