Viewing docs for Confluent v0.1.0 (Older version)
published on Monday, Mar 9, 2026 by Pulumi
confluentcloud.KafkaTopic describes a Kafka Topic data source.
Example Usage
using System.Collections.Immutable;
using Pulumi;
using ConfluentCloud = Pulumi.ConfluentCloud;

class MyStack : Stack
{
    public MyStack()
    {
        // Look up the "orders" topic on an existing Kafka cluster
        // (the confluent_kafka_cluster resource named basic-cluster).
        var orders = Output.Create(ConfluentCloud.GetKafkaTopic.InvokeAsync(new ConfluentCloud.GetKafkaTopicArgs
        {
            KafkaCluster = new ConfluentCloud.Inputs.GetKafkaTopicKafkaClusterArgs
            {
                Id = confluent_kafka_cluster.Basic_cluster.Id,
            },
            TopicName = "orders",
            HttpEndpoint = confluent_kafka_cluster.Basic_cluster.Http_endpoint,
            Credentials = new ConfluentCloud.Inputs.GetKafkaTopicCredentialsArgs
            {
                Key = "<Kafka API Key for confluent_kafka_cluster.basic-cluster>",
                Secret = "<Kafka API Secret for confluent_kafka_cluster.basic-cluster>",
            },
        }));
        this.Config = orders.Apply(orders => orders.Config);
    }

    // `config` is the topic's custom-settings map (see "getKafkaTopic Result":
    // Config is Dictionary<string, string>), so the output is a dictionary,
    // not a single string.
    [Output("config")]
    public Output<ImmutableDictionary<string, string>> Config { get; set; }
}
package main
import (
"github.com/pulumi/pulumi-confluentcloud/sdk/go/confluentcloud"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
orders, err := confluentcloud.LookupKafkaTopic(ctx, &GetKafkaTopicArgs{
KafkaCluster: GetKafkaTopicKafkaCluster{
Id: confluent_kafka_cluster.Basic - cluster.Id,
},
TopicName: "orders",
HttpEndpoint: confluent_kafka_cluster.Basic - cluster.Http_endpoint,
Credentials: GetKafkaTopicCredentials{
Key: "<Kafka API Key for confluent_kafka_cluster.basic-cluster>",
Secret: "<Kafka API Secret for confluent_kafka_cluster.basic-cluster>",
},
}, nil)
if err != nil {
return err
}
ctx.Export("config", orders.Config)
return nil
})
}
package generated_program;
import java.util.*;
import java.io.*;
import java.nio.*;
import com.pulumi.*;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var orders = Output.of(ConfluentcloudFunctions.getKafkaTopic(GetKafkaTopicArgs.builder()
.kafkaCluster(GetKafkaTopicKafkaClusterArgs.builder()
.id(confluent_kafka_cluster.basic-cluster().id())
.build())
.topicName("orders")
.httpEndpoint(confluent_kafka_cluster.basic-cluster().http_endpoint())
.credentials(GetKafkaTopicCredentialsArgs.builder()
.key("<Kafka API Key for confluent_kafka_cluster.basic-cluster>")
.secret("<Kafka API Secret for confluent_kafka_cluster.basic-cluster>")
.build())
.build()));
ctx.export("config", orders.apply(getKafkaTopicResult -> getKafkaTopicResult.config()));
}
}
import * as pulumi from "@pulumi/pulumi";
import * as confluentcloud from "@pulumi/confluentcloud";

// Look up the "orders" topic on an existing Kafka cluster
// (the confluent_kafka_cluster resource named basic-cluster).
const orders = confluentcloud.getKafkaTopic({
    kafkaCluster: {
        id: confluent_kafka_cluster["basic-cluster"].id,
    },
    topicName: "orders",
    httpEndpoint: confluent_kafka_cluster["basic-cluster"].http_endpoint,
    credentials: {
        key: "<Kafka API Key for confluent_kafka_cluster.basic-cluster>",
        secret: "<Kafka API Secret for confluent_kafka_cluster.basic-cluster>",
    },
});

// Export the topic's custom settings map.
export const config = orders.then(result => result.config);
import pulumi
import pulumi_confluentcloud as confluentcloud
# Look up the "orders" topic on an existing Kafka cluster
# (the confluent_kafka_cluster resource named basic-cluster).
# NOTE(review): `confluent_kafka_cluster` is assumed to be defined elsewhere
# in the program -- it is not created in this snippet; confirm before running.
orders = confluentcloud.get_kafka_topic(kafka_cluster=confluentcloud.GetKafkaTopicKafkaClusterArgs(
id=confluent_kafka_cluster["basic-cluster"]["id"],
),
topic_name="orders",
http_endpoint=confluent_kafka_cluster["basic-cluster"]["http_endpoint"],
credentials=confluentcloud.GetKafkaTopicCredentialsArgs(
key="<Kafka API Key for confluent_kafka_cluster.basic-cluster>",
secret="<Kafka API Secret for confluent_kafka_cluster.basic-cluster>",
))
# Export the topic's custom settings map.
pulumi.export("config", orders.config)
variables:
  orders:
    Fn::Invoke:
      Function: confluentcloud:getKafkaTopic
      Arguments:
        kafkaCluster:
          # References the confluent_kafka_cluster resource named basic-cluster.
          id: ${confluent_kafka_cluster.basic-cluster.id}
        topicName: orders
        httpEndpoint: ${confluent_kafka_cluster.basic-cluster.http_endpoint}
        credentials:
          key: <Kafka API Key for confluent_kafka_cluster.basic-cluster>
          secret: <Kafka API Secret for confluent_kafka_cluster.basic-cluster>
outputs:
  # The topic's custom settings map.
  config: ${orders.config}
Using getKafkaTopic
Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.
function getKafkaTopic(args: GetKafkaTopicArgs, opts?: InvokeOptions): Promise<GetKafkaTopicResult>
function getKafkaTopicOutput(args: GetKafkaTopicOutputArgs, opts?: InvokeOptions): Output<GetKafkaTopicResult>

def get_kafka_topic(credentials: Optional[GetKafkaTopicCredentials] = None,
http_endpoint: Optional[str] = None,
kafka_cluster: Optional[GetKafkaTopicKafkaCluster] = None,
topic_name: Optional[str] = None,
opts: Optional[InvokeOptions] = None) -> GetKafkaTopicResult
def get_kafka_topic_output(credentials: Optional[pulumi.Input[GetKafkaTopicCredentialsArgs]] = None,
http_endpoint: Optional[pulumi.Input[str]] = None,
kafka_cluster: Optional[pulumi.Input[GetKafkaTopicKafkaClusterArgs]] = None,
topic_name: Optional[pulumi.Input[str]] = None,
                    opts: Optional[InvokeOptions] = None) -> Output[GetKafkaTopicResult]

func LookupKafkaTopic(ctx *Context, args *LookupKafkaTopicArgs, opts ...InvokeOption) (*LookupKafkaTopicResult, error)
func LookupKafkaTopicOutput(ctx *Context, args *LookupKafkaTopicOutputArgs, opts ...InvokeOption) LookupKafkaTopicResultOutput

> Note: This function is named LookupKafkaTopic in the Go SDK.
public static class GetKafkaTopic
{
public static Task<GetKafkaTopicResult> InvokeAsync(GetKafkaTopicArgs args, InvokeOptions? opts = null)
public static Output<GetKafkaTopicResult> Invoke(GetKafkaTopicInvokeArgs args, InvokeOptions? opts = null)
}

public static CompletableFuture<GetKafkaTopicResult> getKafkaTopic(GetKafkaTopicArgs args, InvokeOptions options)
public static Output<GetKafkaTopicResult> getKafkaTopic(GetKafkaTopicArgs args, InvokeOptions options)
fn::invoke:
function: confluentcloud:index/getKafkaTopic:getKafkaTopic
arguments:
        # arguments dictionary

The following arguments are supported:

C#:
- Credentials (Pulumi.ConfluentCloud.Inputs.GetKafkaTopicCredentials)
- HttpEndpoint (string) - The REST endpoint of the Kafka cluster, for example, https://pkc-00000.us-central1.gcp.confluent.cloud:443.
- KafkaCluster (Pulumi.ConfluentCloud.Inputs.GetKafkaTopicKafkaCluster)
- TopicName (string) - The name of the topic, for example, orders-1. The topic name can be up to 255 characters in length and can contain only alphanumeric characters, hyphens, and underscores.

Go:
- Credentials (GetKafkaTopicCredentials)
- HttpEndpoint (string) - The REST endpoint of the Kafka cluster (see the C# entry for the full description).
- KafkaCluster (GetKafkaTopicKafkaCluster)
- TopicName (string) - The name of the topic (see the C# entry for the full description).

Java:
- credentials (GetKafkaTopicCredentials)
- httpEndpoint (String) - The REST endpoint of the Kafka cluster (see the C# entry for the full description).
- kafkaCluster (GetKafkaTopicKafkaCluster)
- topicName (String) - The name of the topic (see the C# entry for the full description).

JavaScript/TypeScript:
- credentials (GetKafkaTopicCredentials)
- httpEndpoint (string) - The REST endpoint of the Kafka cluster (see the C# entry for the full description).
- kafkaCluster (GetKafkaTopicKafkaCluster)
- topicName (string) - The name of the topic (see the C# entry for the full description).

Python:
- credentials (GetKafkaTopicCredentials)
- http_endpoint (str) - The REST endpoint of the Kafka cluster (see the C# entry for the full description).
- kafka_cluster (GetKafkaTopicKafkaCluster)
- topic_name (str) - The name of the topic (see the C# entry for the full description).

YAML:
- credentials (Property Map)
- httpEndpoint (String) - The REST endpoint of the Kafka cluster (see the C# entry for the full description).
- kafkaCluster (Property Map)
- topicName (String) - The name of the topic (see the C# entry for the full description).
getKafkaTopic Result
The following output properties are available:
C#:
- Config (Dictionary<string, string>) - (Optional Map) The custom topic settings.
- Credentials (Pulumi.ConfluentCloud.Outputs.GetKafkaTopicCredentials)
- HttpEndpoint (string)
- Id (string) - The provider-assigned unique ID for this managed resource.
- KafkaCluster (Pulumi.ConfluentCloud.Outputs.GetKafkaTopicKafkaCluster)
- PartitionsCount (int) - (Required Number) The number of partitions to create in the topic. Defaults to 6.
- TopicName (string)

Go:
- Config (map[string]string) - (Optional Map) The custom topic settings.
- Credentials (GetKafkaTopicCredentials)
- HttpEndpoint (string)
- Id (string) - The provider-assigned unique ID for this managed resource.
- KafkaCluster (GetKafkaTopicKafkaCluster)
- PartitionsCount (int) - (Required Number) The number of partitions to create in the topic. Defaults to 6.
- TopicName (string)

Java:
- config (Map<String,String>) - (Optional Map) The custom topic settings.
- credentials (GetKafkaTopicCredentials)
- httpEndpoint (String)
- id (String) - The provider-assigned unique ID for this managed resource.
- kafkaCluster (GetKafkaTopicKafkaCluster)
- partitionsCount (Integer) - (Required Number) The number of partitions to create in the topic. Defaults to 6.
- topicName (String)

JavaScript/TypeScript:
- config ({[key: string]: string}) - (Optional Map) The custom topic settings.
- credentials (GetKafkaTopicCredentials)
- httpEndpoint (string)
- id (string) - The provider-assigned unique ID for this managed resource.
- kafkaCluster (GetKafkaTopicKafkaCluster)
- partitionsCount (number) - (Required Number) The number of partitions to create in the topic. Defaults to 6.
- topicName (string)

Python:
- config (Mapping[str, str]) - (Optional Map) The custom topic settings.
- credentials (GetKafkaTopicCredentials)
- http_endpoint (str)
- id (str) - The provider-assigned unique ID for this managed resource.
- kafka_cluster (GetKafkaTopicKafkaCluster)
- partitions_count (int) - (Required Number) The number of partitions to create in the topic. Defaults to 6.
- topic_name (str)

YAML:
- config (Map<String>) - (Optional Map) The custom topic settings.
- credentials (Property Map)
- httpEndpoint (String)
- id (String) - The provider-assigned unique ID for this managed resource.
- kafkaCluster (Property Map)
- partitionsCount (Number) - (Required Number) The number of partitions to create in the topic. Defaults to 6.
- topicName (String)
Supporting Types
GetKafkaTopicCredentials
GetKafkaTopicKafkaCluster
- Id string
- The ID of the Kafka cluster, for example,
lkc-abc123.
- Id string
- The ID of the Kafka cluster, for example,
lkc-abc123.
- id String
- The ID of the Kafka cluster, for example,
lkc-abc123.
- id string
- The ID of the Kafka cluster, for example,
lkc-abc123.
- id str
- The ID of the Kafka cluster, for example,
lkc-abc123.
- id String
- The ID of the Kafka cluster, for example,
lkc-abc123.
Package Details
- Repository
- Confluent Cloud pulumi/pulumi-confluentcloud
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the confluent Terraform Provider.
Viewing docs for Confluent v0.1.0 (Older version)
published on Monday, Mar 9, 2026 by Pulumi
