1. Packages
  2. Packages
  3. Confluent Provider
  4. API Docs
  5. getKafkaTopic
Viewing docs for Confluent v0.1.0 (Older version)
published on Monday, Mar 9, 2026 by Pulumi
confluentcloud logo
Viewing docs for Confluent v0.1.0 (Older version)
published on Monday, Mar 9, 2026 by Pulumi

    confluentcloud.KafkaTopic describes a Kafka Topic data source.

    Example Usage

    using System.Collections.Immutable;
    using Pulumi;
    using ConfluentCloud = Pulumi.ConfluentCloud;
    
    class MyStack : Stack
    {
        public MyStack()
        {
            // Look up the "orders" topic on an existing Kafka cluster.
            // Replace the placeholder values with your cluster's ID, REST
            // endpoint, and Kafka API credentials.
            var orders = Output.Create(ConfluentCloud.GetKafkaTopic.InvokeAsync(new ConfluentCloud.GetKafkaTopicArgs
            {
                KafkaCluster = new ConfluentCloud.Inputs.GetKafkaTopicKafkaClusterArgs
                {
                    Id = "lkc-abc123",
                },
                TopicName = "orders",
                HttpEndpoint = "https://pkc-00000.us-central1.gcp.confluent.cloud:443",
                Credentials = new ConfluentCloud.Inputs.GetKafkaTopicCredentialsArgs
                {
                    Key = "<Kafka API Key for confluent_kafka_cluster.basic-cluster>",
                    Secret = "<Kafka API Secret for confluent_kafka_cluster.basic-cluster>",
                },
            }));
            // Expose the topic's custom settings as a stack output. The
            // `Config` result is a map of setting name to value, so the
            // output is a string-to-string dictionary (not a plain string).
            this.Config = orders.Apply(result => result.Config);
        }
    
        [Output("config")]
        public Output<ImmutableDictionary<string, string>> Config { get; set; }
    }
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-confluentcloud/sdk/go/confluentcloud"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		orders, err := confluentcloud.LookupKafkaTopic(ctx, &GetKafkaTopicArgs{
    			KafkaCluster: GetKafkaTopicKafkaCluster{
    				Id: confluent_kafka_cluster.Basic - cluster.Id,
    			},
    			TopicName:    "orders",
    			HttpEndpoint: confluent_kafka_cluster.Basic - cluster.Http_endpoint,
    			Credentials: GetKafkaTopicCredentials{
    				Key:    "<Kafka API Key for confluent_kafka_cluster.basic-cluster>",
    				Secret: "<Kafka API Secret for confluent_kafka_cluster.basic-cluster>",
    			},
    		}, nil)
    		if err != nil {
    			return err
    		}
    		ctx.Export("config", orders.Config)
    		return nil
    	})
    }
    
    package generated_program;
    
    import java.util.*;
    import java.io.*;
    import java.nio.*;
    import com.pulumi.*;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            final var orders = Output.of(ConfluentcloudFunctions.getKafkaTopic(GetKafkaTopicArgs.builder()
                .kafkaCluster(GetKafkaTopicKafkaClusterArgs.builder()
                    .id(confluent_kafka_cluster.basic-cluster().id())
                    .build())
                .topicName("orders")
                .httpEndpoint(confluent_kafka_cluster.basic-cluster().http_endpoint())
                .credentials(GetKafkaTopicCredentialsArgs.builder()
                    .key("<Kafka API Key for confluent_kafka_cluster.basic-cluster>")
                    .secret("<Kafka API Secret for confluent_kafka_cluster.basic-cluster>")
                    .build())
                .build()));
    
            ctx.export("config", orders.apply(getKafkaTopicResult -> getKafkaTopicResult.config()));
        }
    }
    
    import * as pulumi from "@pulumi/pulumi";
    import * as confluentcloud from "@pulumi/confluentcloud";
    
    // Look up the "orders" topic on an existing Kafka cluster.
    // Replace the placeholder values with your cluster's ID, REST endpoint,
    // and Kafka API credentials.
    const orders = confluentcloud.getKafkaTopic({
        kafkaCluster: {
            id: "lkc-abc123",
        },
        topicName: "orders",
        httpEndpoint: "https://pkc-00000.us-central1.gcp.confluent.cloud:443",
        credentials: {
            key: "<Kafka API Key for confluent_kafka_cluster.basic-cluster>",
            secret: "<Kafka API Secret for confluent_kafka_cluster.basic-cluster>",
        },
    });
    // Export the topic's custom settings.
    export const config = orders.then(orders => orders.config);
    
    import pulumi
    import pulumi_confluentcloud as confluentcloud
    
    # Look up the "orders" topic on an existing Kafka cluster.
    # Replace the placeholder values with your cluster's ID, REST endpoint,
    # and Kafka API credentials.
    orders = confluentcloud.get_kafka_topic(
        kafka_cluster=confluentcloud.GetKafkaTopicKafkaClusterArgs(
            id="lkc-abc123",
        ),
        topic_name="orders",
        http_endpoint="https://pkc-00000.us-central1.gcp.confluent.cloud:443",
        credentials=confluentcloud.GetKafkaTopicCredentialsArgs(
            key="<Kafka API Key for confluent_kafka_cluster.basic-cluster>",
            secret="<Kafka API Secret for confluent_kafka_cluster.basic-cluster>",
        ))
    # Export the topic's custom settings.
    pulumi.export("config", orders.config)
    
    # Look up the "orders" topic on an existing Kafka cluster.
    # Replace the placeholder values with your cluster's ID, REST endpoint,
    # and Kafka API credentials.
    variables:
      orders:
        Fn::Invoke:
          Function: confluentcloud:getKafkaTopic
          Arguments:
            kafkaCluster:
              id: lkc-abc123
            topicName: orders
            httpEndpoint: https://pkc-00000.us-central1.gcp.confluent.cloud:443
            credentials:
              key: <Kafka API Key for confluent_kafka_cluster.basic-cluster>
              secret: <Kafka API Secret for confluent_kafka_cluster.basic-cluster>
    # Export the topic's custom settings.
    outputs:
      config: ${orders.config}
    

    Using getKafkaTopic

    Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.

    function getKafkaTopic(args: GetKafkaTopicArgs, opts?: InvokeOptions): Promise<GetKafkaTopicResult>
    function getKafkaTopicOutput(args: GetKafkaTopicOutputArgs, opts?: InvokeOptions): Output<GetKafkaTopicResult>
    def get_kafka_topic(credentials: Optional[GetKafkaTopicCredentials] = None,
                        http_endpoint: Optional[str] = None,
                        kafka_cluster: Optional[GetKafkaTopicKafkaCluster] = None,
                        topic_name: Optional[str] = None,
                        opts: Optional[InvokeOptions] = None) -> GetKafkaTopicResult
    def get_kafka_topic_output(credentials: Optional[pulumi.Input[GetKafkaTopicCredentialsArgs]] = None,
                        http_endpoint: Optional[pulumi.Input[str]] = None,
                        kafka_cluster: Optional[pulumi.Input[GetKafkaTopicKafkaClusterArgs]] = None,
                        topic_name: Optional[pulumi.Input[str]] = None,
                        opts: Optional[InvokeOptions] = None) -> Output[GetKafkaTopicResult]
    func LookupKafkaTopic(ctx *Context, args *LookupKafkaTopicArgs, opts ...InvokeOption) (*LookupKafkaTopicResult, error)
    func LookupKafkaTopicOutput(ctx *Context, args *LookupKafkaTopicOutputArgs, opts ...InvokeOption) LookupKafkaTopicResultOutput

    > Note: This function is named LookupKafkaTopic in the Go SDK.

    public static class GetKafkaTopic 
    {
        public static Task<GetKafkaTopicResult> InvokeAsync(GetKafkaTopicArgs args, InvokeOptions? opts = null)
        public static Output<GetKafkaTopicResult> Invoke(GetKafkaTopicInvokeArgs args, InvokeOptions? opts = null)
    }
    public static CompletableFuture<GetKafkaTopicResult> getKafkaTopic(GetKafkaTopicArgs args, InvokeOptions options)
    public static Output<GetKafkaTopicResult> getKafkaTopic(GetKafkaTopicArgs args, InvokeOptions options)
    
    fn::invoke:
      function: confluentcloud:index/getKafkaTopic:getKafkaTopic
      arguments:
        # arguments dictionary

    The following arguments are supported:

    Credentials Pulumi.ConfluentCloud.Inputs.GetKafkaTopicCredentials
    HttpEndpoint string
    The REST endpoint of the Kafka cluster, for example, https://pkc-00000.us-central1.gcp.confluent.cloud:443.
    KafkaCluster Pulumi.ConfluentCloud.Inputs.GetKafkaTopicKafkaCluster
    TopicName string
    The name of the topic, for example, orders-1. The topic name can be up to 255 characters in length and can contain only alphanumeric characters, hyphens, and underscores.
    Credentials GetKafkaTopicCredentials
    HttpEndpoint string
    The REST endpoint of the Kafka cluster, for example, https://pkc-00000.us-central1.gcp.confluent.cloud:443.
    KafkaCluster GetKafkaTopicKafkaCluster
    TopicName string
    The name of the topic, for example, orders-1. The topic name can be up to 255 characters in length and can contain only alphanumeric characters, hyphens, and underscores.
    credentials GetKafkaTopicCredentials
    httpEndpoint String
    The REST endpoint of the Kafka cluster, for example, https://pkc-00000.us-central1.gcp.confluent.cloud:443.
    kafkaCluster GetKafkaTopicKafkaCluster
    topicName String
    The name of the topic, for example, orders-1. The topic name can be up to 255 characters in length and can contain only alphanumeric characters, hyphens, and underscores.
    credentials GetKafkaTopicCredentials
    httpEndpoint string
    The REST endpoint of the Kafka cluster, for example, https://pkc-00000.us-central1.gcp.confluent.cloud:443.
    kafkaCluster GetKafkaTopicKafkaCluster
    topicName string
    The name of the topic, for example, orders-1. The topic name can be up to 255 characters in length and can contain only alphanumeric characters, hyphens, and underscores.
    credentials GetKafkaTopicCredentials
    http_endpoint str
    The REST endpoint of the Kafka cluster, for example, https://pkc-00000.us-central1.gcp.confluent.cloud:443.
    kafka_cluster GetKafkaTopicKafkaCluster
    topic_name str
    The name of the topic, for example, orders-1. The topic name can be up to 255 characters in length and can contain only alphanumeric characters, hyphens, and underscores.
    credentials Property Map
    httpEndpoint String
    The REST endpoint of the Kafka cluster, for example, https://pkc-00000.us-central1.gcp.confluent.cloud:443.
    kafkaCluster Property Map
    topicName String
    The name of the topic, for example, orders-1. The topic name can be up to 255 characters in length and can contain only alphanumeric characters, hyphens, and underscores.

    getKafkaTopic Result

    The following output properties are available:

    Config Dictionary<string, string>
    (Optional Map) The custom topic settings:
    Credentials Pulumi.ConfluentCloud.Outputs.GetKafkaTopicCredentials
    HttpEndpoint string
    Id string
    The provider-assigned unique ID for this managed resource.
    KafkaCluster Pulumi.ConfluentCloud.Outputs.GetKafkaTopicKafkaCluster
    PartitionsCount int
    (Required Number) The number of partitions to create in the topic. Defaults to 6.
    TopicName string
    Config map[string]string
    (Optional Map) The custom topic settings:
    Credentials GetKafkaTopicCredentials
    HttpEndpoint string
    Id string
    The provider-assigned unique ID for this managed resource.
    KafkaCluster GetKafkaTopicKafkaCluster
    PartitionsCount int
    (Required Number) The number of partitions to create in the topic. Defaults to 6.
    TopicName string
    config Map<String,String>
    (Optional Map) The custom topic settings:
    credentials GetKafkaTopicCredentials
    httpEndpoint String
    id String
    The provider-assigned unique ID for this managed resource.
    kafkaCluster GetKafkaTopicKafkaCluster
    partitionsCount Integer
    (Required Number) The number of partitions to create in the topic. Defaults to 6.
    topicName String
    config {[key: string]: string}
    (Optional Map) The custom topic settings:
    credentials GetKafkaTopicCredentials
    httpEndpoint string
    id string
    The provider-assigned unique ID for this managed resource.
    kafkaCluster GetKafkaTopicKafkaCluster
    partitionsCount number
    (Required Number) The number of partitions to create in the topic. Defaults to 6.
    topicName string
    config Mapping[str, str]
    (Optional Map) The custom topic settings:
    credentials GetKafkaTopicCredentials
    http_endpoint str
    id str
    The provider-assigned unique ID for this managed resource.
    kafka_cluster GetKafkaTopicKafkaCluster
    partitions_count int
    (Required Number) The number of partitions to create in the topic. Defaults to 6.
    topic_name str
    config Map<String>
    (Optional Map) The custom topic settings:
    credentials Property Map
    httpEndpoint String
    id String
    The provider-assigned unique ID for this managed resource.
    kafkaCluster Property Map
    partitionsCount Number
    (Required Number) The number of partitions to create in the topic. Defaults to 6.
    topicName String

    Supporting Types

    GetKafkaTopicCredentials

    Key string
    The Kafka API Key.
    Secret string
    The Kafka API Secret.
    Key string
    The Kafka API Key.
    Secret string
    The Kafka API Secret.
    key String
    The Kafka API Key.
    secret String
    The Kafka API Secret.
    key string
    The Kafka API Key.
    secret string
    The Kafka API Secret.
    key str
    The Kafka API Key.
    secret str
    The Kafka API Secret.
    key String
    The Kafka API Key.
    secret String
    The Kafka API Secret.

    GetKafkaTopicKafkaCluster

    Id string
    The ID of the Kafka cluster, for example, lkc-abc123.
    Id string
    The ID of the Kafka cluster, for example, lkc-abc123.
    id String
    The ID of the Kafka cluster, for example, lkc-abc123.
    id string
    The ID of the Kafka cluster, for example, lkc-abc123.
    id str
    The ID of the Kafka cluster, for example, lkc-abc123.
    id String
    The ID of the Kafka cluster, for example, lkc-abc123.

    Package Details

    Repository
    Confluent Cloud pulumi/pulumi-confluentcloud
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the confluent Terraform Provider.
    confluentcloud logo
    Viewing docs for Confluent v0.1.0 (Older version)
    published on Monday, Mar 9, 2026 by Pulumi
      Try Pulumi Cloud free. Your team will thank you.