MdbKafkaTopic
Manages a topic of a Kafka cluster within Yandex.Cloud. For more information, see the official documentation.
Example Usage
using Pulumi;
using Yandex = Pulumi.Yandex;

class MyStack : Stack
{
    public MyStack()
    {
        var foo = new Yandex.MdbKafkaCluster("foo", new Yandex.MdbKafkaClusterArgs
        {
            NetworkId = "c64vs98keiqc7f24pvkd",
            Config = new Yandex.Inputs.MdbKafkaClusterConfigArgs
            {
                Version = "2.8",
                Zones =
                {
                    "ru-central1-a",
                },
                UnmanagedTopics = true,
                Kafka = new Yandex.Inputs.MdbKafkaClusterConfigKafkaArgs
                {
                    Resources = new Yandex.Inputs.MdbKafkaClusterConfigKafkaResourcesArgs
                    {
                        ResourcePresetId = "s2.micro",
                        DiskTypeId = "network-hdd",
                        DiskSize = 16,
                    },
                },
            },
        });
        var events = new Yandex.MdbKafkaTopic("events", new Yandex.MdbKafkaTopicArgs
        {
            ClusterId = foo.Id,
            Partitions = 4,
            ReplicationFactor = 1,
            TopicConfig = new Yandex.Inputs.MdbKafkaTopicTopicConfigArgs
            {
                CleanupPolicy = "CLEANUP_POLICY_COMPACT",
                CompressionType = "COMPRESSION_TYPE_LZ4",
                DeleteRetentionMs = "86400000",
                FileDeleteDelayMs = "60000",
                FlushMessages = "128",
                FlushMs = "1000",
                MinCompactionLagMs = "0",
                RetentionBytes = "10737418240",
                RetentionMs = "604800000",
                MaxMessageBytes = "1048588",
                MinInsyncReplicas = "1",
                SegmentBytes = "268435456",
                Preallocate = true,
            },
        });
    }
}
package main

import (
    "github.com/pulumi/pulumi-yandex/sdk/go/yandex"
    "github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
    pulumi.Run(func(ctx *pulumi.Context) error {
        foo, err := yandex.NewMdbKafkaCluster(ctx, "foo", &yandex.MdbKafkaClusterArgs{
            NetworkId: pulumi.String("c64vs98keiqc7f24pvkd"),
            Config: &yandex.MdbKafkaClusterConfigArgs{
                Version: pulumi.String("2.8"),
                Zones: pulumi.StringArray{
                    pulumi.String("ru-central1-a"),
                },
                UnmanagedTopics: pulumi.Bool(true),
                Kafka: &yandex.MdbKafkaClusterConfigKafkaArgs{
                    Resources: &yandex.MdbKafkaClusterConfigKafkaResourcesArgs{
                        ResourcePresetId: pulumi.String("s2.micro"),
                        DiskTypeId:       pulumi.String("network-hdd"),
                        DiskSize:         pulumi.Int(16),
                    },
                },
            },
        })
        if err != nil {
            return err
        }
        _, err = yandex.NewMdbKafkaTopic(ctx, "events", &yandex.MdbKafkaTopicArgs{
            ClusterId:         foo.ID(),
            Partitions:        pulumi.Int(4),
            ReplicationFactor: pulumi.Int(1),
            TopicConfig: &yandex.MdbKafkaTopicTopicConfigArgs{
                CleanupPolicy:      pulumi.String("CLEANUP_POLICY_COMPACT"),
                CompressionType:    pulumi.String("COMPRESSION_TYPE_LZ4"),
                DeleteRetentionMs:  pulumi.String("86400000"),
                FileDeleteDelayMs:  pulumi.String("60000"),
                FlushMessages:      pulumi.String("128"),
                FlushMs:            pulumi.String("1000"),
                MinCompactionLagMs: pulumi.String("0"),
                RetentionBytes:     pulumi.String("10737418240"),
                RetentionMs:        pulumi.String("604800000"),
                MaxMessageBytes:    pulumi.String("1048588"),
                MinInsyncReplicas:  pulumi.String("1"),
                SegmentBytes:       pulumi.String("268435456"),
                Preallocate:        pulumi.Bool(true),
            },
        })
        if err != nil {
            return err
        }
        return nil
    })
}
import pulumi
import pulumi_yandex as yandex

foo = yandex.MdbKafkaCluster("foo",
    network_id="c64vs98keiqc7f24pvkd",
    config=yandex.MdbKafkaClusterConfigArgs(
        version="2.8",
        zones=["ru-central1-a"],
        unmanaged_topics=True,
        kafka=yandex.MdbKafkaClusterConfigKafkaArgs(
            resources=yandex.MdbKafkaClusterConfigKafkaResourcesArgs(
                resource_preset_id="s2.micro",
                disk_type_id="network-hdd",
                disk_size=16,
            ),
        ),
    ))
events = yandex.MdbKafkaTopic("events",
    cluster_id=foo.id,
    partitions=4,
    replication_factor=1,
    topic_config=yandex.MdbKafkaTopicTopicConfigArgs(
        cleanup_policy="CLEANUP_POLICY_COMPACT",
        compression_type="COMPRESSION_TYPE_LZ4",
        delete_retention_ms="86400000",
        file_delete_delay_ms="60000",
        flush_messages="128",
        flush_ms="1000",
        min_compaction_lag_ms="0",
        retention_bytes="10737418240",
        retention_ms="604800000",
        max_message_bytes="1048588",
        min_insync_replicas="1",
        segment_bytes="268435456",
        preallocate=True,
    ))
import * as pulumi from "@pulumi/pulumi";
import * as yandex from "@pulumi/yandex";

const foo = new yandex.MdbKafkaCluster("foo", {
    networkId: "c64vs98keiqc7f24pvkd",
    config: {
        version: "2.8",
        zones: ["ru-central1-a"],
        unmanagedTopics: true,
        kafka: {
            resources: {
                resourcePresetId: "s2.micro",
                diskTypeId: "network-hdd",
                diskSize: 16,
            },
        },
    },
});
const events = new yandex.MdbKafkaTopic("events", {
    clusterId: foo.id,
    partitions: 4,
    replicationFactor: 1,
    topicConfig: {
        cleanupPolicy: "CLEANUP_POLICY_COMPACT",
        compressionType: "COMPRESSION_TYPE_LZ4",
        deleteRetentionMs: "86400000",
        fileDeleteDelayMs: "60000",
        flushMessages: "128",
        flushMs: "1000",
        minCompactionLagMs: "0",
        retentionBytes: "10737418240",
        retentionMs: "604800000",
        maxMessageBytes: "1048588",
        minInsyncReplicas: "1",
        segmentBytes: "268435456",
        preallocate: true,
    },
});
Create a MdbKafkaTopic Resource
new MdbKafkaTopic(name: string, args: MdbKafkaTopicArgs, opts?: CustomResourceOptions);
@overload
def MdbKafkaTopic(resource_name: str,
opts: Optional[ResourceOptions] = None,
cluster_id: Optional[str] = None,
name: Optional[str] = None,
partitions: Optional[int] = None,
replication_factor: Optional[int] = None,
topic_config: Optional[MdbKafkaTopicTopicConfigArgs] = None)
@overload
def MdbKafkaTopic(resource_name: str,
args: MdbKafkaTopicArgs,
opts: Optional[ResourceOptions] = None)
func NewMdbKafkaTopic(ctx *Context, name string, args MdbKafkaTopicArgs, opts ...ResourceOption) (*MdbKafkaTopic, error)
public MdbKafkaTopic(string name, MdbKafkaTopicArgs args, CustomResourceOptions? opts = null)
public MdbKafkaTopic(String name, MdbKafkaTopicArgs args)
public MdbKafkaTopic(String name, MdbKafkaTopicArgs args, CustomResourceOptions options)
type: yandex:MdbKafkaTopic
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
- name string (resource_name in Python)
  The unique name of the resource.
- args MdbKafkaTopicArgs
  The arguments to resource properties.
- opts CustomResourceOptions (ResourceOptions in Python, ResourceOption in Go, options in Java)
  Bag of options to control the resource's behavior.
- ctx Context (Go only)
  Context object for the current deployment.
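The options bag holds the standard Pulumi resource options. As a short TypeScript sketch, the topic from the example above could be created with the protect and dependsOn options, which are standard CustomResourceOptions (shown here only as an illustration, not as provider-specific behavior):

import * as yandex from "@pulumi/yandex";

// Minimal cluster, mirroring the Example Usage above.
const foo = new yandex.MdbKafkaCluster("foo", {
    networkId: "c64vs98keiqc7f24pvkd",
    config: {
        version: "2.8",
        zones: ["ru-central1-a"],
        unmanagedTopics: true,
        kafka: {
            resources: {
                resourcePresetId: "s2.micro",
                diskTypeId: "network-hdd",
                diskSize: 16,
            },
        },
    },
});

// The third constructor argument is the options bag.
const events = new yandex.MdbKafkaTopic("events", {
    clusterId: foo.id,
    partitions: 4,
    replicationFactor: 1,
}, {
    protect: true,     // refuse to delete the topic until protection is removed
    dependsOn: [foo],  // make the dependency on the cluster explicit
});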
MdbKafkaTopic Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
The MdbKafkaTopic resource accepts the following input properties:
Property names follow each language's conventions (for example, ClusterId in C#, cluster_id in Python, clusterId in TypeScript).

- ClusterId string
- Partitions int
  The number of the topic's partitions.
- ReplicationFactor int
  Amount of data copies (replicas) for the topic in the cluster.
- Name string
  The name of the topic.
- TopicConfig MdbKafkaTopicTopicConfigArgs
  User-defined settings for the topic. The structure is documented below.
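As a complement to the full example above, a minimal TypeScript sketch that sets only the basic inputs; the kafkaClusterId configuration key is hypothetical and stands in for the ID of an existing Managed Kafka cluster:

import * as pulumi from "@pulumi/pulumi";
import * as yandex from "@pulumi/yandex";

// Read the target cluster ID from stack configuration rather than creating
// the cluster in the same program (the config key name is illustrative).
const config = new pulumi.Config();
const clusterId = config.require("kafkaClusterId");

const logs = new yandex.MdbKafkaTopic("logs", {
    clusterId,
    name: "logs",          // optional explicit topic name
    partitions: 6,
    replicationFactor: 1,
    // topicConfig is omitted here, so the topic is created with default settings.
});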
Outputs
All input properties are implicitly available as output properties. Additionally, the MdbKafkaTopic resource produces the following output properties:
- Id string
  The provider-assigned unique ID for this managed resource.
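For instance, continuing the TypeScript example above, the provider-assigned ID can be exposed as a stack output:

// `events` is the MdbKafkaTopic resource from the earlier TypeScript example.
export const eventsTopicId = events.id;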
Look up an Existing MdbKafkaTopic Resource
Get an existing MdbKafkaTopic resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: MdbKafkaTopicState, opts?: CustomResourceOptions): MdbKafkaTopic
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
cluster_id: Optional[str] = None,
name: Optional[str] = None,
partitions: Optional[int] = None,
replication_factor: Optional[int] = None,
topic_config: Optional[MdbKafkaTopicTopicConfigArgs] = None) -> MdbKafkaTopic
func GetMdbKafkaTopic(ctx *Context, name string, id IDInput, state *MdbKafkaTopicState, opts ...ResourceOption) (*MdbKafkaTopic, error)
public static MdbKafkaTopic Get(string name, Input<string> id, MdbKafkaTopicState? state, CustomResourceOptions? opts = null)
public static MdbKafkaTopic get(String name, Output<String> id, MdbKafkaTopicState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
- name (resource_name in Python)
  The unique name of the resulting resource.
- id
  The unique provider ID of the resource to look up.
- state
  Any extra arguments used during the lookup.
- opts
  A bag of options that control this resource's behavior.
- ClusterId string
- Name string
  The name of the topic.
- Partitions int
  The number of the topic's partitions.
- ReplicationFactor int
  Amount of data copies (replicas) for the topic in the cluster.
- TopicConfig MdbKafkaTopicTopicConfigArgs
  User-defined settings for the topic. The structure is documented below.
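A TypeScript sketch of the lookup form; the resource ID passed to get is assumed to use the same {{cluster_id}}:{{topic_name}} format as the import command documented below, and both values shown are placeholders:

import * as yandex from "@pulumi/yandex";

// Adopt an existing topic into the program without managing its creation.
// The ID is assumed to follow the "<cluster_id>:<topic_name>" format used
// for import; the cluster ID and topic name below are placeholders.
const existing = yandex.MdbKafkaTopic.get("existing-events",
    "c9qabcd1efgh23ijk456:events");

// The looked-up resource exposes the same properties as a managed one.
export const existingPartitions = existing.partitions;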
Supporting Types
MdbKafkaTopicTopicConfig
- CleanupPolicy string
- CompressionType string
- DeleteRetentionMs string
- FileDeleteDelayMs string
- FlushMessages string
- FlushMs string
- MaxMessageBytes string
- MinCompactionLagMs string
- MinInsyncReplicas string
- Preallocate bool
- RetentionBytes string
- RetentionMs string
- SegmentBytes string
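To illustrate these settings, a TypeScript sketch of a topic tuned for time- and size-based retention; the CLEANUP_POLICY_DELETE value is assumed by analogy with the CLEANUP_POLICY_COMPACT value used in the example above, and the cluster ID is a placeholder:

import * as yandex from "@pulumi/yandex";

const clusterId = "c9qabcd1efgh23ijk456"; // placeholder; use a real Managed Kafka cluster ID

// A topic that relies on retention rather than log compaction.
// The *Ms and *Bytes settings are passed as strings, as in the examples above.
const metrics = new yandex.MdbKafkaTopic("metrics", {
    clusterId: clusterId,
    partitions: 12,
    replicationFactor: 1,
    topicConfig: {
        cleanupPolicy: "CLEANUP_POLICY_DELETE",  // assumed spelling, mirroring CLEANUP_POLICY_COMPACT above
        compressionType: "COMPRESSION_TYPE_LZ4",
        retentionMs: "259200000",      // keep messages for three days
        retentionBytes: "5368709120",  // cap retained data at 5 GiB per partition
        minInsyncReplicas: "1",
    },
});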
Import
A Kafka topic can be imported using the following format:
$ pulumi import yandex:index/mdbKafkaTopic:MdbKafkaTopic foo {{cluster_id}}:{{topic_name}}
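For example, the events topic from the usage example above could be imported with a command along these lines, where the cluster ID is a placeholder for the real cluster's ID:

$ pulumi import yandex:index/mdbKafkaTopic:MdbKafkaTopic events c9qabcd1efgh23ijk456:events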
Package Details
- Repository
  https://github.com/pulumi/pulumi-yandex
- License
  Apache-2.0
- Notes
  This Pulumi package is based on the yandex Terraform Provider.