
upstash.KafkaConnector


Upstash v0.2.0 published on Wednesday, May 24, 2023 by Upstash

    Example Usage

    using Pulumi;
    using Upstash = Pulumi.Upstash;
    
    class MyStack : Stack
    {
        public MyStack()
        {
            // Not necessary if the topic belongs to an already created cluster.
            var exampleKafkaCluster = new Upstash.KafkaCluster("exampleKafkaCluster", new Upstash.KafkaClusterArgs
            {
                ClusterName = "Terraform_Upstash_Cluster",
                Region = "eu-west-1",
                Multizone = false,
            });
            var exampleKafkaTopic = new Upstash.KafkaTopic("exampleKafkaTopic", new Upstash.KafkaTopicArgs
            {
                TopicName = "TerraformTopic",
                Partitions = 1,
                RetentionTime = 625135,
                RetentionSize = 725124,
                MaxMessageSize = 829213,
                CleanupPolicy = "delete",
                ClusterId = exampleKafkaCluster.ClusterId,
            });
            var exampleKafkaConnector = new Upstash.KafkaConnector("exampleKafkaConnector", new Upstash.KafkaConnectorArgs
            {
                ClusterId = exampleKafkaCluster.ClusterId,
                Properties = 
                {
                    { "collection", "user123" },
                    { "connection.uri", "mongodb+srv://test:test@cluster0.fohyg7p.mongodb.net/?retryWrites=true&w=majority" },
                    { "connector.class", "com.mongodb.kafka.connect.MongoSourceConnector" },
                    { "database", "myshinynewdb2" },
                    { "topics", exampleKafkaTopic.TopicName },
                },
            });
            // OPTIONAL: set RunningState = "running", "paused" or "restart" on the connector args above.
        }
    
    }
    
    package main
    
    import (
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    	"github.com/upstash/pulumi-upstash/sdk/go/upstash"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		exampleKafkaCluster, err := upstash.NewKafkaCluster(ctx, "exampleKafkaCluster", &upstash.KafkaClusterArgs{
    			ClusterName: pulumi.String("Terraform_Upstash_Cluster"),
    			Region:      pulumi.String("eu-west-1"),
    			Multizone:   pulumi.Bool(false),
    		})
    		if err != nil {
    			return err
    		}
    		exampleKafkaTopic, err := upstash.NewKafkaTopic(ctx, "exampleKafkaTopic", &upstash.KafkaTopicArgs{
    			TopicName:      pulumi.String("TerraformTopic"),
    			Partitions:     pulumi.Int(1),
    			RetentionTime:  pulumi.Int(625135),
    			RetentionSize:  pulumi.Int(725124),
    			MaxMessageSize: pulumi.Int(829213),
    			CleanupPolicy:  pulumi.String("delete"),
    			ClusterId:      exampleKafkaCluster.ClusterId,
    		})
    		if err != nil {
    			return err
    		}
    		_, err = upstash.NewKafkaConnector(ctx, "exampleKafkaConnector", &upstash.KafkaConnectorArgs{
    			ClusterId: exampleKafkaCluster.ClusterId,
    			Properties: pulumi.AnyMap{
    				"collection":      pulumi.Any("user123"),
    				"connection.uri":  pulumi.Any("mongodb+srv://test:test@cluster0.fohyg7p.mongodb.net/?retryWrites=true&w=majority"),
    				"connector.class": pulumi.Any("com.mongodb.kafka.connect.MongoSourceConnector"),
    				"database":        pulumi.Any("myshinynewdb2"),
    				"topics":          exampleKafkaTopic.TopicName,
    			},
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    // The resource imports below assume the provider's Java SDK is published under the com.pulumi.upstash package.
    import com.pulumi.upstash.KafkaCluster;
    import com.pulumi.upstash.KafkaClusterArgs;
    import com.pulumi.upstash.KafkaTopic;
    import com.pulumi.upstash.KafkaTopicArgs;
    import com.pulumi.upstash.KafkaConnector;
    import com.pulumi.upstash.KafkaConnectorArgs;
    import java.util.Map;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var exampleKafkaCluster = new KafkaCluster("exampleKafkaCluster", KafkaClusterArgs.builder()        
                .clusterName("Terraform_Upstash_Cluster")
                .region("eu-west-1")
                .multizone(false)
                .build());
    
            var exampleKafkaTopic = new KafkaTopic("exampleKafkaTopic", KafkaTopicArgs.builder()        
                .topicName("TerraformTopic")
                .partitions(1)
                .retentionTime(625135)
                .retentionSize(725124)
                .maxMessageSize(829213)
                .cleanupPolicy("delete")
                .clusterId(exampleKafkaCluster.clusterId())
                .build());
    
            var exampleKafkaConnector = new KafkaConnector("exampleKafkaConnector", KafkaConnectorArgs.builder()        
                .clusterId(exampleKafkaCluster.clusterId())
                .properties(Map.ofEntries(
                    Map.entry("collection", "user123"),
                    Map.entry("connection.uri", "mongodb+srv://test:test@cluster0.fohyg7p.mongodb.net/?retryWrites=true&w=majority"),
                    Map.entry("connector.class", "com.mongodb.kafka.connect.MongoSourceConnector"),
                    Map.entry("database", "myshinynewdb2"),
                    Map.entry("topics", exampleKafkaTopic.topicName())
                ))
                .build());
    
        }
    }
    
    import pulumi
    import upstash_pulumi as upstash
    
    # Not necessary if the topic belongs to an already created cluster.
    example_kafka_cluster = upstash.KafkaCluster("exampleKafkaCluster",
        cluster_name="Terraform_Upstash_Cluster",
        region="eu-west-1",
        multizone=False)
    example_kafka_topic = upstash.KafkaTopic("exampleKafkaTopic",
        topic_name="TerraformTopic",
        partitions=1,
        retention_time=625135,
        retention_size=725124,
        max_message_size=829213,
        cleanup_policy="delete",
        cluster_id=example_kafka_cluster.cluster_id)
    example_kafka_connector = upstash.KafkaConnector("exampleKafkaConnector",
        cluster_id=example_kafka_cluster.cluster_id,
        properties={
            "collection": "user123",
            "connection.uri": "mongodb+srv://test:test@cluster0.fohyg7p.mongodb.net/?retryWrites=true&w=majority",
            "connector.class": "com.mongodb.kafka.connect.MongoSourceConnector",
            "database": "myshinynewdb2",
            "topics": example_kafka_topic.topic_name,
        })
    # OPTIONAL: set running_state="running", "paused" or "restart" on the connector above.
    
    import * as pulumi from "@pulumi/pulumi";
    import * as pulumi from "@upstash/pulumi";
    
    // Not necessary if the topic belongs to an already created cluster.
    const exampleKafkaCluster = new upstash.KafkaCluster("exampleKafkaCluster", {
        clusterName: "Terraform_Upstash_Cluster",
        region: "eu-west-1",
        multizone: false,
    });
    const exampleKafkaTopic = new upstash.KafkaTopic("exampleKafkaTopic", {
        topicName: "TerraformTopic",
        partitions: 1,
        retentionTime: 625135,
        retentionSize: 725124,
        maxMessageSize: 829213,
        cleanupPolicy: "delete",
        clusterId: exampleKafkaCluster.clusterId,
    });
    const exampleKafkaConnector = new upstash.KafkaConnector("exampleKafkaConnector", {
        clusterId: exampleKafkaCluster.clusterId,
        properties: {
            collection: "user123",
            "connection.uri": "mongodb+srv://test:test@cluster0.fohyg7p.mongodb.net/?retryWrites=true&w=majority",
            "connector.class": "com.mongodb.kafka.connect.MongoSourceConnector",
            database: "myshinynewdb2",
            topics: exampleKafkaTopic.topicName,
        },
    });
    // OPTIONAL: set runningState: "running", "paused" or "restart" on the connector above.
    
    resources:
      exampleKafkaCluster:
        type: upstash:KafkaCluster
        properties:
          clusterName: Terraform_Upstash_Cluster
          region: eu-west-1
          multizone: false
      exampleKafkaTopic:
        type: upstash:KafkaTopic
        properties:
          topicName: TerraformTopic
          partitions: 1
          retentionTime: 625135
          retentionSize: 725124
          maxMessageSize: 829213
          cleanupPolicy: delete
          clusterId: ${exampleKafkaCluster.clusterId}
      exampleKafkaConnector:
        type: upstash:KafkaConnector
        properties:
          clusterId: ${exampleKafkaCluster.clusterId}
          properties:
            collection: user123
            connection.uri: mongodb+srv://test:test@cluster0.fohyg7p.mongodb.net/?retryWrites=true&w=majority
            connector.class: com.mongodb.kafka.connect.MongoSourceConnector
            database: myshinynewdb2
            topics: ${exampleKafkaTopic.topicName}
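
    The examples above only mention the optional running state in a comment. As a minimal TypeScript sketch (reusing the exampleKafkaCluster and exampleKafkaTopic resources from the example above; the resource name pausedConnector is illustrative), a connector can be paused or resumed by setting runningState to one of the documented values and re-running pulumi up:

    import * as upstash from "@upstash/pulumi";

    // Same connector definition as above, but created in the paused state.
    // Switch runningState to "running" (or "restart") and run `pulumi up` to resume it.
    const pausedConnector = new upstash.KafkaConnector("pausedConnector", {
        clusterId: exampleKafkaCluster.clusterId,
        properties: {
            "connector.class": "com.mongodb.kafka.connect.MongoSourceConnector",
            "connection.uri": "mongodb+srv://test:test@cluster0.fohyg7p.mongodb.net/?retryWrites=true&w=majority",
            database: "myshinynewdb2",
            collection: "user123",
            topics: exampleKafkaTopic.topicName,
        },
        runningState: "paused",
    });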
    

    Create KafkaConnector Resource

    new KafkaConnector(name: string, args: KafkaConnectorArgs, opts?: CustomResourceOptions);
    @overload
    def KafkaConnector(resource_name: str,
                       opts: Optional[ResourceOptions] = None,
                       cluster_id: Optional[str] = None,
                       name: Optional[str] = None,
                       properties: Optional[Mapping[str, Any]] = None,
                       running_state: Optional[str] = None)
    @overload
    def KafkaConnector(resource_name: str,
                       args: KafkaConnectorArgs,
                       opts: Optional[ResourceOptions] = None)
    func NewKafkaConnector(ctx *Context, name string, args KafkaConnectorArgs, opts ...ResourceOption) (*KafkaConnector, error)
    public KafkaConnector(string name, KafkaConnectorArgs args, CustomResourceOptions? opts = null)
    public KafkaConnector(String name, KafkaConnectorArgs args)
    public KafkaConnector(String name, KafkaConnectorArgs args, CustomResourceOptions options)
    
    type: upstash:KafkaConnector
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    
    name string
    The unique name of the resource.
    args KafkaConnectorArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args KafkaConnectorArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args KafkaConnectorArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args KafkaConnectorArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args KafkaConnectorArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.
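
    For illustration, the opts/options parameter accepts the standard Pulumi resource options; none of them are connector-specific. A hedged TypeScript sketch, reusing the resources from the Example Usage section:

    import * as upstash from "@upstash/pulumi";

    const connector = new upstash.KafkaConnector("exampleKafkaConnector", {
        clusterId: exampleKafkaCluster.clusterId,
        properties: {
            "connector.class": "com.mongodb.kafka.connect.MongoSourceConnector",
            "connection.uri": "mongodb+srv://test:test@cluster0.fohyg7p.mongodb.net/?retryWrites=true&w=majority",
            topics: exampleKafkaTopic.topicName,
        },
    }, {
        dependsOn: [exampleKafkaTopic], // explicit ordering (usually already inferred from topicName)
        protect: true,                  // refuse to delete the connector without unprotecting it first
    });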

    KafkaConnector Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    The KafkaConnector resource accepts the following input properties:

    ClusterId string
    Unique ID of the Kafka cluster that the connector belongs to
    Properties Dictionary<string, object>
    Configuration properties of the connector
    Name string
    Name of the connector
    RunningState string
    Running state of the connector. Can be one of 'paused', 'running' or 'restart'
    ClusterId string
    Unique ID of the Kafka cluster that the connector belongs to
    Properties map[string]interface{}
    Configuration properties of the connector
    Name string
    Name of the connector
    RunningState string
    Running state of the connector. Can be one of 'paused', 'running' or 'restart'
    clusterId String
    Unique ID of the Kafka cluster that the connector belongs to
    properties Map<String,Object>
    Configuration properties of the connector
    name String
    Name of the connector
    runningState String
    Running state of the connector. Can be one of 'paused', 'running' or 'restart'
    clusterId string
    Unique ID of the Kafka cluster that the connector belongs to
    properties {[key: string]: any}
    Configuration properties of the connector
    name string
    Name of the connector
    runningState string
    Running state of the connector. Can be one of 'paused', 'running' or 'restart'
    cluster_id str
    Unique ID of the Kafka cluster that the connector belongs to
    properties Mapping[str, Any]
    Configuration properties of the connector
    name str
    Name of the connector
    running_state str
    Running state of the connector. Can be one of 'paused', 'running' or 'restart'
    clusterId String
    Unique ID of the Kafka cluster that the connector belongs to
    properties Map<Any>
    Configuration properties of the connector
    name String
    Name of the connector
    runningState String
    Running state of the connector. Can be one of 'paused', 'running' or 'restart'
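
    Because properties is an arbitrary key/value map, sensitive connector settings such as connection.uri can be supplied from stack configuration instead of being hard-coded. A TypeScript sketch, assuming a secret named mongoUri has been set with pulumi config set --secret mongoUri "mongodb+srv://..." and that the cluster and topic from the Example Usage section are in scope:

    import * as pulumi from "@pulumi/pulumi";
    import * as upstash from "@upstash/pulumi";

    const cfg = new pulumi.Config();
    // requireSecret returns an Output<string> that Pulumi keeps encrypted in state.
    const mongoUri = cfg.requireSecret("mongoUri");

    const connector = new upstash.KafkaConnector("secretConnector", {
        clusterId: exampleKafkaCluster.clusterId,
        properties: {
            "connector.class": "com.mongodb.kafka.connect.MongoSourceConnector",
            "connection.uri": mongoUri, // outputs are accepted as property values
            database: "myshinynewdb2",
            collection: "user123",
            topics: exampleKafkaTopic.topicName,
        },
    });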

    Outputs

    All input properties are implicitly available as output properties. Additionally, the KafkaConnector resource produces the following output properties:

    ConnectorId string
    Unique ID of the created connector
    CreationTime int
    Creation time of the connector
    Id string
    The provider-assigned unique ID for this managed resource.
    ConnectorId string
    Unique ID of the created connector
    CreationTime int
    Creation time of the connector
    Id string
    The provider-assigned unique ID for this managed resource.
    connectorId String
    Unique ID of the created connector
    creationTime Integer
    Creation time of the connector
    id String
    The provider-assigned unique ID for this managed resource.
    connectorId string
    Unique ID of the created connector
    creationTime number
    Creation time of the connector
    id string
    The provider-assigned unique ID for this managed resource.
    connector_id str
    Unique ID of the created connector
    creation_time int
    Creation time of the connector
    id str
    The provider-assigned unique ID for this managed resource.
    connectorId String
    Unique ID of the created connector
    creationTime Number
    Creation time of the connector
    id String
    The provider-assigned unique ID for this managed resource.
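
    These outputs can be exported as stack outputs once the connector is created, for example (TypeScript, assuming the exampleKafkaConnector resource from the Example Usage section):

    // Resolved after `pulumi up` and shown by `pulumi stack output`.
    export const connectorId = exampleKafkaConnector.connectorId;   // Upstash-assigned connector ID
    export const creationTime = exampleKafkaConnector.creationTime; // creation time reported by the API
    export const resourceId = exampleKafkaConnector.id;             // provider-assigned Pulumi resource ID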

    Look up Existing KafkaConnector Resource

    Get an existing KafkaConnector resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: KafkaConnectorState, opts?: CustomResourceOptions): KafkaConnector
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            cluster_id: Optional[str] = None,
            connector_id: Optional[str] = None,
            creation_time: Optional[int] = None,
            name: Optional[str] = None,
            properties: Optional[Mapping[str, Any]] = None,
            running_state: Optional[str] = None) -> KafkaConnector
    func GetKafkaConnector(ctx *Context, name string, id IDInput, state *KafkaConnectorState, opts ...ResourceOption) (*KafkaConnector, error)
    public static KafkaConnector Get(string name, Input<string> id, KafkaConnectorState? state, CustomResourceOptions? opts = null)
    public static KafkaConnector get(String name, Output<String> id, KafkaConnectorState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    ClusterId string
    Unique ID of the Kafka cluster that the connector belongs to
    ConnectorId string
    Unique ID of the created connector
    CreationTime int
    Creation time of the connector
    Name string
    Name of the connector
    Properties Dictionary<string, object>
    Configuration properties of the connector
    RunningState string
    Running state of the connector. Can be one of 'paused', 'running' or 'restart'
    ClusterId string
    Unique ID of the Kafka cluster that the connector belongs to
    ConnectorId string
    Unique ID of the created connector
    CreationTime int
    Creation time of the connector
    Name string
    Name of the connector
    Properties map[string]interface{}
    Configuration properties of the connector
    RunningState string
    Running state of the connector. Can be one of 'paused', 'running' or 'restart'
    clusterId String
    Unique ID of the Kafka cluster that the connector belongs to
    connectorId String
    Unique ID of the created connector
    creationTime Integer
    Creation time of the connector
    name String
    Name of the connector
    properties Map<String,Object>
    Configuration properties of the connector
    runningState String
    Running state of the connector. Can be one of 'paused', 'running' or 'restart'
    clusterId string
    Unique ID of the Kafka cluster that the connector belongs to
    connectorId string
    Unique ID of the created connector
    creationTime number
    Creation time of the connector
    name string
    Name of the connector
    properties {[key: string]: any}
    Configuration properties of the connector
    runningState string
    Running state of the connector. Can be one of 'paused', 'running' or 'restart'
    cluster_id str
    Unique ID of the Kafka cluster that the connector belongs to
    connector_id str
    Unique ID of the created connector
    creation_time int
    Creation time of the connector
    name str
    Name of the connector
    properties Mapping[str, Any]
    Configuration properties of the connector
    running_state str
    Running state of the connector. Can be one of 'paused', 'running' or 'restart'
    clusterId String
    Unique ID of the Kafka cluster that the connector belongs to
    connectorId String
    Unique ID of the created connector
    creationTime Number
    Creation time of the connector
    name String
    Name of the connector
    properties Map<Any>
    Configuration properties of the connector
    runningState String
    Running state of the connector. Can be one of 'paused', 'running' or 'restart'
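
    A minimal lookup sketch in TypeScript; the resource ID passed to get is a placeholder and would normally come from existing stack state (for example via pulumi stack export):

    import * as upstash from "@upstash/pulumi";

    // Looks up an already-provisioned connector by its provider-assigned ID.
    // No new resource is created; the returned object only exposes its current state.
    const existing = upstash.KafkaConnector.get("existingConnector", "<connector-resource-id>");

    export const existingRunningState = existing.runningState;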

    Package Details

    Repository
    upstash/pulumi-upstash
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the upstash Terraform Provider.