# Kafka Connect Resource

The Kafka Connect resource allows the creation and management of Aiven Kafka Connect services.

## Example Usage

```csharp
using Pulumi;
using Aiven = Pulumi.Aiven;

class MyStack : Stack
{
    public MyStack()
    {
        var kc1 = new Aiven.KafkaConnect("kc1", new Aiven.KafkaConnectArgs
        {
            Project = data.Aiven_project.Pr1.Project,
            CloudName = "google-europe-west1",
            Plan = "startup-4",
            ServiceName = "my-kc1",
            MaintenanceWindowDow = "monday",
            MaintenanceWindowTime = "10:00:00",
            KafkaConnectUserConfig = new Aiven.Inputs.KafkaConnectKafkaConnectUserConfigArgs
            {
                KafkaConnect = new Aiven.Inputs.KafkaConnectKafkaConnectUserConfigKafkaConnectArgs
                {
                    ConsumerIsolationLevel = "read_committed",
                },
                PublicAccess = new Aiven.Inputs.KafkaConnectKafkaConnectUserConfigPublicAccessArgs
                {
                    KafkaConnect = "true",
                },
            },
        });
    }

}
```

```go
package main

import (
    "github.com/pulumi/pulumi-aiven/sdk/v3/go/aiven"
    "github.com/pulumi/pulumi/sdk/v2/go/pulumi"
)

func main() {
    pulumi.Run(func(ctx *pulumi.Context) error {
        _, err := aiven.NewKafkaConnect(ctx, "kc1", &aiven.KafkaConnectArgs{
            Project:               pulumi.Any(data.Aiven_project.Pr1.Project),
            CloudName:             pulumi.String("google-europe-west1"),
            Plan:                  pulumi.String("startup-4"),
            ServiceName:           pulumi.String("my-kc1"),
            MaintenanceWindowDow:  pulumi.String("monday"),
            MaintenanceWindowTime: pulumi.String("10:00:00"),
            KafkaConnectUserConfig: &aiven.KafkaConnectKafkaConnectUserConfigArgs{
                KafkaConnect: &aiven.KafkaConnectKafkaConnectUserConfigKafkaConnectArgs{
                    ConsumerIsolationLevel: pulumi.String("read_committed"),
                },
                PublicAccess: &aiven.KafkaConnectKafkaConnectUserConfigPublicAccessArgs{
                    KafkaConnect: pulumi.String("true"),
                },
            },
        })
        if err != nil {
            return err
        }
        return nil
    })
}
```

```python
import pulumi
import pulumi_aiven as aiven

kc1 = aiven.KafkaConnect("kc1",
    project=data["aiven_project"]["pr1"]["project"],
    cloud_name="google-europe-west1",
    plan="startup-4",
    service_name="my-kc1",
    maintenance_window_dow="monday",
    maintenance_window_time="10:00:00",
    kafka_connect_user_config=aiven.KafkaConnectKafkaConnectUserConfigArgs(
        kafka_connect=aiven.KafkaConnectKafkaConnectUserConfigKafkaConnectArgs(
            consumer_isolation_level="read_committed",
        ),
        public_access=aiven.KafkaConnectKafkaConnectUserConfigPublicAccessArgs(
            kafka_connect=True,
        ),
    ))
```

```typescript
import * as pulumi from "@pulumi/pulumi";
import * as aiven from "@pulumi/aiven";

const kc1 = new aiven.KafkaConnect("kc1", {
    project: data.aiven_project.pr1.project,
    cloudName: "google-europe-west1",
    plan: "startup-4",
    serviceName: "my-kc1",
    maintenanceWindowDow: "monday",
    maintenanceWindowTime: "10:00:00",
    kafkaConnectUserConfig: {
        kafkaConnect: {
            consumerIsolationLevel: "read_committed",
        },
        publicAccess: {
            kafkaConnect: true,
        },
    },
});
```

## Create a KafkaConnect Resource

new KafkaConnect(name: string, args: KafkaConnectArgs, opts?: CustomResourceOptions);
def KafkaConnect(resource_name: str, opts: Optional[ResourceOptions] = None, cloud_name: Optional[str] = None, kafka_connect: Optional[KafkaConnectKafkaConnectArgs] = None, kafka_connect_user_config: Optional[KafkaConnectKafkaConnectUserConfigArgs] = None, maintenance_window_dow: Optional[str] = None, maintenance_window_time: Optional[str] = None, plan: Optional[str] = None, project: Optional[str] = None, project_vpc_id: Optional[str] = None, service_integrations: Optional[Sequence[KafkaConnectServiceIntegrationArgs]] = None, service_name: Optional[str] = None, termination_protection: Optional[bool] = None)
func NewKafkaConnect(ctx *Context, name string, args KafkaConnectArgs, opts ...ResourceOption) (*KafkaConnect, error)
public KafkaConnect(string name, KafkaConnectArgs args, CustomResourceOptions? opts = null)
name string
The unique name of the resource.
args KafkaConnectArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name str
The unique name of the resource.
opts ResourceOptions
A bag of options that control this resource's behavior.
ctx Context
Context object for the current deployment.
name string
The unique name of the resource.
args KafkaConnectArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name string
The unique name of the resource.
args KafkaConnectArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.

## KafkaConnect Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Programming Model docs.

### Inputs

The KafkaConnect resource accepts the following input properties:

Project string

identifies the project the service belongs to. To set up a proper dependency between the project and the service, refer to the project as shown in the example above. The project cannot be changed later without destroying and re-creating the service.

ServiceName string

specifies the actual name of the service. The name cannot be changed later without destroying and re-creating the service, so the name should be picked based on the intended service usage rather than current attributes.

CloudName string

defines the cloud provider and region where the service is hosted. This can be changed freely after the service is created, but changing the value triggers a potentially lengthy migration process for the service. The format is the cloud provider name (aws, azure, do, google, upcloud, etc.), a dash, and the cloud provider specific region name; the available regions are documented in each cloud provider's own support articles.

KafkaConnectServer KafkaConnectKafkaConnectArgs

Kafka Connect server provided values.

KafkaConnectUserConfig KafkaConnectKafkaConnectUserConfigArgs

defines Kafka Connect specific additional configuration options. The available options are documented under the KafkaConnectKafkaConnectUserConfig supporting type below.

MaintenanceWindowDow string

day of week when maintenance operations should be performed. One of monday, tuesday, wednesday, etc.

MaintenanceWindowTime string

time of day when maintenance operations should be performed. UTC time in HH:mm:ss format.

Plan string

defines what kind of computing resources are allocated for the service. It can be changed after creation, though there are some restrictions when moving to a smaller plan: the new plan must have enough disk space to store all current data, and switching to a plan with fewer nodes might not be supported. The basic plan names are hobbyist, startup-x, business-x and premium-x, where x is (roughly) the amount of memory on each node (other attributes such as the number of CPUs and the amount of disk space also vary, but the naming is based on memory). The exact options can be seen in the Aiven web console's Create Service dialog.

ProjectVpcId string

optionally specifies the VPC the service should run in. If the value is not set, the service is not run inside a VPC. When set, the value should be given as a reference, as shown in the sketch after this input list, to set up dependencies correctly, and the VPC must be in the same cloud and region as the service itself. The service can be freely moved into and out of a VPC after creation, but doing so triggers a migration to new servers, so the operation can take a significant amount of time to complete if the service has a lot of data.

ServiceIntegrations List<KafkaConnectServiceIntegrationArgs>

Service integrations to specify when creating a service. Not applied after initial service creation.

TerminationProtection bool

prevents the service from being deleted. It is recommended to set this to true for all production services to prevent unintentional service deletion. This does not shield against deleting databases or topics, but for services with backups much of the content can at least be restored from backup in case of accidental deletion.

Project string

identifies the project the service belongs to. To set up a proper dependency between the project and the service, refer to the project as shown in the example above. The project cannot be changed later without destroying and re-creating the service.

ServiceName string

specifies the actual name of the service. The name cannot be changed later without destroying and re-creating the service, so the name should be picked based on the intended service usage rather than current attributes.

CloudName string

defines the cloud provider and region where the service is hosted. This can be changed freely after the service is created, but changing the value triggers a potentially lengthy migration process for the service. The format is the cloud provider name (aws, azure, do, google, upcloud, etc.), a dash, and the cloud provider specific region name; the available regions are documented in each cloud provider's own support articles.

KafkaConnect KafkaConnectKafkaConnect

Kafka Connect server provided values.

KafkaConnectUserConfig KafkaConnectKafkaConnectUserConfig

defines Kafka Connect specific additional configuration options. The available options are documented under the KafkaConnectKafkaConnectUserConfig supporting type below.

MaintenanceWindowDow string

day of week when maintenance operations should be performed. One of monday, tuesday, wednesday, etc.

MaintenanceWindowTime string

time of day when maintenance operations should be performed. UTC time in HH:mm:ss format.

Plan string

defines what kind of computing resources are allocated for the service. It can be changed after creation, though there are some restrictions when moving to a smaller plan: the new plan must have enough disk space to store all current data, and switching to a plan with fewer nodes might not be supported. The basic plan names are hobbyist, startup-x, business-x and premium-x, where x is (roughly) the amount of memory on each node (other attributes such as the number of CPUs and the amount of disk space also vary, but the naming is based on memory). The exact options can be seen in the Aiven web console's Create Service dialog.

ProjectVpcId string

optionally specifies the VPC the service should run in. If the value is not set, the service is not run inside a VPC. When set, the value should be given as a reference, as shown in the sketch after this input list, to set up dependencies correctly, and the VPC must be in the same cloud and region as the service itself. The service can be freely moved into and out of a VPC after creation, but doing so triggers a migration to new servers, so the operation can take a significant amount of time to complete if the service has a lot of data.

ServiceIntegrations []KafkaConnectServiceIntegration

Service integrations to specify when creating a service. Not applied after initial service creation.

TerminationProtection bool

prevents the service from being deleted. It is recommended to set this to true for all production services to prevent unintentional service deletion. This does not shield against deleting databases or topics, but for services with backups much of the content can at least be restored from backup in case of accidental deletion.

project string

identifies the project the service belongs to. To set up a proper dependency between the project and the service, refer to the project as shown in the example above. The project cannot be changed later without destroying and re-creating the service.

serviceName string

specifies the actual name of the service. The name cannot be changed later without destroying and re-creating the service, so the name should be picked based on the intended service usage rather than current attributes.

cloudName string

defines the cloud provider and region where the service is hosted. This can be changed freely after the service is created, but changing the value triggers a potentially lengthy migration process for the service. The format is the cloud provider name (aws, azure, do, google, upcloud, etc.), a dash, and the cloud provider specific region name; the available regions are documented in each cloud provider's own support articles.

kafkaConnect KafkaConnectKafkaConnect

Kafka Connect server provided values.

kafkaConnectUserConfig KafkaConnectKafkaConnectUserConfig

defines Kafka Connect specific additional configuration options. The available options are documented under the KafkaConnectKafkaConnectUserConfig supporting type below.

maintenanceWindowDow string

day of week when maintenance operations should be performed. One of monday, tuesday, wednesday, etc.

maintenanceWindowTime string

time of day when maintenance operations should be performed. UTC time in HH:mm:ss format.

plan string

defines what kind of computing resources are allocated for the service. It can be changed after creation, though there are some restrictions when moving to a smaller plan: the new plan must have enough disk space to store all current data, and switching to a plan with fewer nodes might not be supported. The basic plan names are hobbyist, startup-x, business-x and premium-x, where x is (roughly) the amount of memory on each node (other attributes such as the number of CPUs and the amount of disk space also vary, but the naming is based on memory). The exact options can be seen in the Aiven web console's Create Service dialog.

projectVpcId string

optionally specifies the VPC the service should run in. If the value is not set, the service is not run inside a VPC. When set, the value should be given as a reference, as shown in the sketch after this input list, to set up dependencies correctly, and the VPC must be in the same cloud and region as the service itself. The service can be freely moved into and out of a VPC after creation, but doing so triggers a migration to new servers, so the operation can take a significant amount of time to complete if the service has a lot of data.

serviceIntegrations KafkaConnectServiceIntegration[]

Service integrations to specify when creating a service. Not applied after initial service creation.

terminationProtection boolean

prevents the service from being deleted. It is recommended to set this to true for all production services to prevent unintentional service deletion. This does not shield against deleting databases or topics, but for services with backups much of the content can at least be restored from backup in case of accidental deletion.

project str

identifies the project the service belongs to. To set up a proper dependency between the project and the service, refer to the project as shown in the example above. The project cannot be changed later without destroying and re-creating the service.

service_name str

specifies the actual name of the service. The name cannot be changed later without destroying and re-creating the service, so the name should be picked based on the intended service usage rather than current attributes.

cloud_name str

defines the cloud provider and region where the service is hosted. This can be changed freely after the service is created, but changing the value triggers a potentially lengthy migration process for the service. The format is the cloud provider name (aws, azure, do, google, upcloud, etc.), a dash, and the cloud provider specific region name; the available regions are documented in each cloud provider's own support articles.

kafka_connect KafkaConnectKafkaConnectArgs

Kafka Connect server provided values.

kafka_connect_user_config KafkaConnectKafkaConnectUserConfigArgs

defines Kafka Connect specific additional configuration options. The available options are documented under the KafkaConnectKafkaConnectUserConfig supporting type below.

maintenance_window_dow str

day of week when maintenance operations should be performed. One of monday, tuesday, wednesday, etc.

maintenance_window_time str

time of day when maintenance operations should be performed. UTC time in HH:mm:ss format.

plan str

defines what kind of computing resources are allocated for the service. It can be changed after creation, though there are some restrictions when moving to a smaller plan: the new plan must have enough disk space to store all current data, and switching to a plan with fewer nodes might not be supported. The basic plan names are hobbyist, startup-x, business-x and premium-x, where x is (roughly) the amount of memory on each node (other attributes such as the number of CPUs and the amount of disk space also vary, but the naming is based on memory). The exact options can be seen in the Aiven web console's Create Service dialog.

project_vpc_id str

optionally specifies the VPC the service should run in. If the value is not set, the service is not run inside a VPC. When set, the value should be given as a reference, as shown in the sketch after this input list, to set up dependencies correctly, and the VPC must be in the same cloud and region as the service itself. The service can be freely moved into and out of a VPC after creation, but doing so triggers a migration to new servers, so the operation can take a significant amount of time to complete if the service has a lot of data.

service_integrations Sequence[KafkaConnectServiceIntegrationArgs]

Service integrations to specify when creating a service. Not applied after initial service creation.

termination_protection bool

prevents the service from being deleted. It is recommended to set this to true for all production services to prevent unintentional service deletion. This does not shield against deleting databases or topics, but for services with backups much of the content can at least be restored from backup in case of accidental deletion.
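
The Project and ProjectVpcId notes above rely on passing references rather than plain strings so that Pulumi orders the resources correctly. Below is a minimal TypeScript sketch of running the service inside a project VPC; the aiven.ProjectVpc arguments, the project name, and the CIDR range are illustrative assumptions rather than values taken from this page.

```typescript
import * as aiven from "@pulumi/aiven";

// A project VPC in the same cloud and region as the service (assumed values).
const vpc = new aiven.ProjectVpc("kc-vpc", {
    project: "my-project",            // placeholder project name
    cloudName: "google-europe-west1",
    networkCidr: "10.1.0.0/24",       // placeholder CIDR
});

const kcPrivate = new aiven.KafkaConnect("kc-private", {
    project: "my-project",
    cloudName: "google-europe-west1",
    plan: "startup-4",
    serviceName: "my-kc-private",
    // Passing the VPC's id as a reference creates the VPC first and places
    // the service inside it, as described for ProjectVpcId above.
    projectVpcId: vpc.id,
    terminationProtection: true,      // recommended for production services
});
```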

### Outputs

All input properties are implicitly available as output properties. Additionally, the KafkaConnect resource produces the following output properties:

Components List<KafkaConnectComponent>

Service component information objects

Id string
The provider-assigned unique ID for this managed resource.
ServiceHost string

Kafka Connect hostname.

ServicePassword string

Password used for connecting to the Kafka Connect service, if applicable.

ServicePort int

Kafka Connect port.

ServiceType string

Aiven internal service type code

ServiceUri string

URI for connecting to the Kafka Connect service.

ServiceUsername string

Username used for connecting to the Kafka Connect service, if applicable.

State string

Service state.

Components []KafkaConnectComponent

Service component information objects

Id string
The provider-assigned unique ID for this managed resource.
ServiceHost string

Kafka Connect hostname.

ServicePassword string

Password used for connecting to the Kafka Connect service, if applicable.

ServicePort int

Kafka Connect port.

ServiceType string

Aiven internal service type code

ServiceUri string

URI for connecting to the Kafka Connect service.

ServiceUsername string

Username used for connecting to the Kafka Connect service, if applicable.

State string

Service state.

components KafkaConnectComponent[]

Service component information objects

id string
The provider-assigned unique ID for this managed resource.
serviceHost string

Kafka Connect hostname.

servicePassword string

Password used for connecting to the Kafka Connect service, if applicable.

servicePort number

Kafka Connect port.

serviceType string

Aiven internal service type code

serviceUri string

URI for connecting to the Kafka Connect service.

serviceUsername string

Username used for connecting to the Kafka Connect service, if applicable.

state string

Service state.

components Sequence[KafkaConnectComponent]

Service component information objects

id str
The provider-assigned unique ID for this managed resource.
service_host str

Kafka Connect hostname.

service_password str

Password used for connecting to the Kafka Connect service, if applicable.

service_port int

Kafka Connect port.

service_type str

Aiven internal service type code

service_uri str

URI for connecting to the Kafka Connect service.

service_username str

Username used for connecting to the Kafka Connect service, if applicable.

state str

Service state.
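
The output properties above can be exported from the stack or fed into other resources once the service has been created. A minimal TypeScript sketch, reusing the `kc1` resource from the example at the top of this page:

```typescript
// Export connection details of the Kafka Connect service created above.
export const kafkaConnectUri = kc1.serviceUri;    // connection URI
export const kafkaConnectHost = kc1.serviceHost;  // hostname
export const kafkaConnectPort = kc1.servicePort;  // port number
export const kafkaConnectState = kc1.state;       // current service state
```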

## Look up an Existing KafkaConnect Resource

Get an existing KafkaConnect resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: KafkaConnectState, opts?: CustomResourceOptions): KafkaConnect
@staticmethod
def get(resource_name: str, id: str, opts: Optional[ResourceOptions] = None, cloud_name: Optional[str] = None, components: Optional[Sequence[KafkaConnectComponentArgs]] = None, kafka_connect: Optional[KafkaConnectKafkaConnectArgs] = None, kafka_connect_user_config: Optional[KafkaConnectKafkaConnectUserConfigArgs] = None, maintenance_window_dow: Optional[str] = None, maintenance_window_time: Optional[str] = None, plan: Optional[str] = None, project: Optional[str] = None, project_vpc_id: Optional[str] = None, service_host: Optional[str] = None, service_integrations: Optional[Sequence[KafkaConnectServiceIntegrationArgs]] = None, service_name: Optional[str] = None, service_password: Optional[str] = None, service_port: Optional[int] = None, service_type: Optional[str] = None, service_uri: Optional[str] = None, service_username: Optional[str] = None, state: Optional[str] = None, termination_protection: Optional[bool] = None) -> KafkaConnect
func GetKafkaConnect(ctx *Context, name string, id IDInput, state *KafkaConnectState, opts ...ResourceOption) (*KafkaConnect, error)
public static KafkaConnect Get(string name, Input<string> id, KafkaConnectState? state, CustomResourceOptions? opts = null)
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
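
As a concrete illustration of the lookup above, the TypeScript sketch below adopts the state of an existing service. It assumes the `<project>/<service_name>` ID format commonly used by the Aiven provider; substitute your own project and service names.

```typescript
import * as aiven from "@pulumi/aiven";

// Look up an existing Kafka Connect service by its provider ID
// (assumed "<project>/<service_name>" format; placeholder names below).
const existing = aiven.KafkaConnect.get("existing-kc", "my-project/my-kc1");

// The looked-up resource exposes the same output properties as a created one.
export const existingServiceUri = existing.serviceUri;
```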

The following state arguments are supported:

CloudName string

defines the cloud provider and region where the service is hosted. This can be changed freely after the service is created, but changing the value triggers a potentially lengthy migration process for the service. The format is the cloud provider name (aws, azure, do, google, upcloud, etc.), a dash, and the cloud provider specific region name; the available regions are documented in each cloud provider's own support articles.

Components List<KafkaConnectComponentArgs>

Service component information objects

KafkaConnectServer KafkaConnectKafkaConnectArgs

Kafka Connect server provided values.

KafkaConnectUserConfig KafkaConnectKafkaConnectUserConfigArgs

defines Kafka Connect specific additional configuration options. The available options are documented under the KafkaConnectKafkaConnectUserConfig supporting type below.

MaintenanceWindowDow string

day of week when maintenance operations should be performed. One of monday, tuesday, wednesday, etc.

MaintenanceWindowTime string

time of day when maintenance operations should be performed. UTC time in HH:mm:ss format.

Plan string

defines what kind of computing resources are allocated for the service. It can be changed after creation, though there are some restrictions when moving to a smaller plan: the new plan must have enough disk space to store all current data, and switching to a plan with fewer nodes might not be supported. The basic plan names are hobbyist, startup-x, business-x and premium-x, where x is (roughly) the amount of memory on each node (other attributes such as the number of CPUs and the amount of disk space also vary, but the naming is based on memory). The exact options can be seen in the Aiven web console's Create Service dialog.

Project string

identifies the project the service belongs to. To set up a proper dependency between the project and the service, refer to the project as shown in the example above. The project cannot be changed later without destroying and re-creating the service.

ProjectVpcId string

optionally specifies the VPC the service should run in. If the value is not set, the service is not run inside a VPC. When set, the value should be given as a reference to set up dependencies correctly, and the VPC must be in the same cloud and region as the service itself. The service can be freely moved into and out of a VPC after creation, but doing so triggers a migration to new servers, so the operation can take a significant amount of time to complete if the service has a lot of data.

ServiceHost string

Kafka Connect hostname.

ServiceIntegrations List<KafkaConnectServiceIntegrationArgs>

Service integrations to specify when creating a service. Not applied after initial service creation.

ServiceName string

specifies the actual name of the service. The name cannot be changed later without destroying and re-creating the service, so the name should be picked based on the intended service usage rather than current attributes.

ServicePassword string

Password used for connecting to the Kafka Connect service, if applicable.

ServicePort int

Kafka Connect port.

ServiceType string

Aiven internal service type code

ServiceUri string

URI for connecting to the Kafka Connect service.

ServiceUsername string

Username used for connecting to the Kafka Connect service, if applicable.

State string

Service state.

TerminationProtection bool

prevents the service from being deleted. It is recommended to set this to true for all production services to prevent unintentional service deletion. This does not shield against deleting databases or topics, but for services with backups much of the content can at least be restored from backup in case of accidental deletion.

CloudName string

defines the cloud provider and region where the service is hosted. This can be changed freely after the service is created, but changing the value triggers a potentially lengthy migration process for the service. The format is the cloud provider name (aws, azure, do, google, upcloud, etc.), a dash, and the cloud provider specific region name; the available regions are documented in each cloud provider's own support articles.

Components []KafkaConnectComponent

Service component information objects

KafkaConnect KafkaConnectKafkaConnect

Kafka Connect server provided values.

KafkaConnectUserConfig KafkaConnectKafkaConnectUserConfig

defines Kafka Connect specific additional configuration options. The available options are documented under the KafkaConnectKafkaConnectUserConfig supporting type below.

MaintenanceWindowDow string

day of week when maintenance operations should be performed. One of monday, tuesday, wednesday, etc.

MaintenanceWindowTime string

time of day when maintenance operations should be performed. UTC time in HH:mm:ss format.

Plan string

defines what kind of computing resources are allocated for the service. It can be changed after creation, though there are some restrictions when moving to a smaller plan: the new plan must have enough disk space to store all current data, and switching to a plan with fewer nodes might not be supported. The basic plan names are hobbyist, startup-x, business-x and premium-x, where x is (roughly) the amount of memory on each node (other attributes such as the number of CPUs and the amount of disk space also vary, but the naming is based on memory). The exact options can be seen in the Aiven web console's Create Service dialog.

Project string

identifies the project the service belongs to. To set up a proper dependency between the project and the service, refer to the project as shown in the example above. The project cannot be changed later without destroying and re-creating the service.

ProjectVpcId string

optionally specifies the VPC the service should run in. If the value is not set, the service is not run inside a VPC. When set, the value should be given as a reference to set up dependencies correctly, and the VPC must be in the same cloud and region as the service itself. The service can be freely moved into and out of a VPC after creation, but doing so triggers a migration to new servers, so the operation can take a significant amount of time to complete if the service has a lot of data.

ServiceHost string

Kafka Connect hostname.

ServiceIntegrations []KafkaConnectServiceIntegration

Service integrations to specify when creating a service. Not applied after initial service creation.

ServiceName string

specifies the actual name of the service. The name cannot be changed later without destroying and re-creating the service, so the name should be picked based on the intended service usage rather than current attributes.

ServicePassword string

Password used for connecting to the Kafka Connect service, if applicable.

ServicePort int

Kafka Connect port.

ServiceType string

Aiven internal service type code

ServiceUri string

URI for connecting to the Kafka Connect service.

ServiceUsername string

Username used for connecting to the Kafka Connect service, if applicable.

State string

Service state.

TerminationProtection bool

prevents the service from being deleted. It is recommended to set this to true for all production services to prevent unintentional service deletion. This does not shield against deleting databases or topics, but for services with backups much of the content can at least be restored from backup in case of accidental deletion.

cloudName string

defines the cloud provider and region where the service is hosted. This can be changed freely after the service is created, but changing the value triggers a potentially lengthy migration process for the service. The format is the cloud provider name (aws, azure, do, google, upcloud, etc.), a dash, and the cloud provider specific region name; the available regions are documented in each cloud provider's own support articles.

components KafkaConnectComponent[]

Service component information objects

kafkaConnect KafkaConnectKafkaConnect

Kafka Connect server provided values.

kafkaConnectUserConfig KafkaConnectKafkaConnectUserConfig

defines Kafka Connect specific additional configuration options. The available options are documented under the KafkaConnectKafkaConnectUserConfig supporting type below.

maintenanceWindowDow string

day of week when maintenance operations should be performed. One of monday, tuesday, wednesday, etc.

maintenanceWindowTime string

time of day when maintenance operations should be performed. UTC time in HH:mm:ss format.

plan string

defines what kind of computing resources are allocated for the service. It can be changed after creation, though there are some restrictions when moving to a smaller plan: the new plan must have enough disk space to store all current data, and switching to a plan with fewer nodes might not be supported. The basic plan names are hobbyist, startup-x, business-x and premium-x, where x is (roughly) the amount of memory on each node (other attributes such as the number of CPUs and the amount of disk space also vary, but the naming is based on memory). The exact options can be seen in the Aiven web console's Create Service dialog.

project string

identifies the project the service belongs to. To set up a proper dependency between the project and the service, refer to the project as shown in the example above. The project cannot be changed later without destroying and re-creating the service.

projectVpcId string

optionally specifies the VPC the service should run in. If the value is not set, the service is not run inside a VPC. When set, the value should be given as a reference to set up dependencies correctly, and the VPC must be in the same cloud and region as the service itself. The service can be freely moved into and out of a VPC after creation, but doing so triggers a migration to new servers, so the operation can take a significant amount of time to complete if the service has a lot of data.

serviceHost string

Kafka Connect hostname.

serviceIntegrations KafkaConnectServiceIntegration[]

Service integrations to specify when creating a service. Not applied after initial service creation.

serviceName string

specifies the actual name of the service. The name cannot be changed later without destroying and re-creating the service, so the name should be picked based on the intended service usage rather than current attributes.

servicePassword string

Password used for connecting to the Kafka Connect service, if applicable.

servicePort number

Kafka Connect port.

serviceType string

Aiven internal service type code

serviceUri string

URI for connecting to the Kafka Connect service.

serviceUsername string

Username used for connecting to the Kafka Connect service, if applicable.

state string

Service state.

terminationProtection boolean

prevents the service from being deleted. It is recommended to set this to true for all production services to prevent unintentional service deletion. This does not shield against deleting databases or topics, but for services with backups much of the content can at least be restored from backup in case of accidental deletion.

cloud_name str

defines the cloud provider and region where the service is hosted. This can be changed freely after the service is created, but changing the value triggers a potentially lengthy migration process for the service. The format is the cloud provider name (aws, azure, do, google, upcloud, etc.), a dash, and the cloud provider specific region name; the available regions are documented in each cloud provider's own support articles.

components Sequence[KafkaConnectComponentArgs]

Service component information objects

kafka_connect KafkaConnectKafkaConnectArgs

Kafka Connect server provided values.

kafka_connect_user_config KafkaConnectKafkaConnectUserConfigArgs

defines Kafka Connect specific additional configuration options. The available options are documented under the KafkaConnectKafkaConnectUserConfig supporting type below.

maintenance_window_dow str

day of week when maintenance operations should be performed. One of monday, tuesday, wednesday, etc.

maintenance_window_time str

time of day when maintenance operations should be performed. UTC time in HH:mm:ss format.

plan str

defines what kind of computing resources are allocated for the service. It can be changed after creation, though there are some restrictions when moving to a smaller plan: the new plan must have enough disk space to store all current data, and switching to a plan with fewer nodes might not be supported. The basic plan names are hobbyist, startup-x, business-x and premium-x, where x is (roughly) the amount of memory on each node (other attributes such as the number of CPUs and the amount of disk space also vary, but the naming is based on memory). The exact options can be seen in the Aiven web console's Create Service dialog.

project str

identifies the project the service belongs to. To set up a proper dependency between the project and the service, refer to the project as shown in the example above. The project cannot be changed later without destroying and re-creating the service.

project_vpc_id str

optionally specifies the VPC the service should run in. If the value is not set, the service is not run inside a VPC. When set, the value should be given as a reference to set up dependencies correctly, and the VPC must be in the same cloud and region as the service itself. The service can be freely moved into and out of a VPC after creation, but doing so triggers a migration to new servers, so the operation can take a significant amount of time to complete if the service has a lot of data.

service_host str

Kafka Connect hostname.

service_integrations Sequence[KafkaConnectServiceIntegrationArgs]

Service integrations to specify when creating a service. Not applied after initial service creation.

service_name str

specifies the actual name of the service. The name cannot be changed later without destroying and re-creating the service, so the name should be picked based on the intended service usage rather than current attributes.

service_password str

Password used for connecting to the Kafka Connect service, if applicable.

service_port int

Kafka Connect port.

service_type str

Aiven internal service type code

service_uri str

URI for connecting to the Kafka Connect service.

service_username str

Username used for connecting to the Kafka Connect service, if applicable.

state str

Service state.

termination_protection bool

prevents the service from being deleted. It is recommended to set this to true for all production services to prevent unintentional service deletion. This does not shield against deleting databases or topics, but for services with backups much of the content can at least be restored from backup in case of accidental deletion.

## Supporting Types

### KafkaConnectComponent

Component string
Host string
KafkaAuthenticationMethod string
Port int
Route string
Ssl bool
Usage string
Component string
Host string
KafkaAuthenticationMethod string
Port int
Route string
Ssl bool
Usage string
component string
host string
kafkaAuthenticationMethod string
port number
route string
ssl boolean
usage string

### KafkaConnectKafkaConnectUserConfig

IpFilters List<string>

allow incoming connections from CIDR address blocks, e.g. 10.20.0.0/16; see the sketch at the end of this section.

KafkaConnect KafkaConnectKafkaConnectUserConfigKafkaConnectArgs

Kafka Connect configuration values.

PrivateAccess KafkaConnectKafkaConnectUserConfigPrivateAccessArgs

Allow access to selected service ports from private networks.

PublicAccess KafkaConnectKafkaConnectUserConfigPublicAccessArgs

Allow access to selected service ports from the public Internet.

IpFilters []string

allow incoming connections from CIDR address blocks, e.g. 10.20.0.0/16

KafkaConnect KafkaConnectKafkaConnectUserConfigKafkaConnect

Kafka Connect configuration values.

PrivateAccess KafkaConnectKafkaConnectUserConfigPrivateAccess

Allow access to selected service ports from private networks.

PublicAccess KafkaConnectKafkaConnectUserConfigPublicAccess

Allow access to selected service ports from the public Internet.

ipFilters string[]

allow incoming connections from CIDR address blocks, e.g. 10.20.0.0/16

kafkaConnect KafkaConnectKafkaConnectUserConfigKafkaConnect

Kafka Connect configuration values.

privateAccess KafkaConnectKafkaConnectUserConfigPrivateAccess

Allow access to selected service ports from private networks.

publicAccess KafkaConnectKafkaConnectUserConfigPublicAccess

Allow access to selected service ports from the public Internet.

ip_filters Sequence[str]

allow incoming connections from CIDR address blocks, e.g. 10.20.0.0/16

kafka_connect KafkaConnectKafkaConnectUserConfigKafkaConnectArgs

Kafka Connect configuration values.

private_access KafkaConnectKafkaConnectUserConfigPrivateAccessArgs

Allow access to selected service ports from private networks.

public_access KafkaConnectKafkaConnectUserConfigPublicAccessArgs

Allow access to selected service ports from the public Internet.
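
As an example of the options above, the TypeScript sketch below combines ipFilters with the service arguments from the first example on this page; the project name and CIDR ranges are placeholders.

```typescript
import * as aiven from "@pulumi/aiven";

const kcFiltered = new aiven.KafkaConnect("kc-filtered", {
    project: "my-project",            // placeholder project name
    cloudName: "google-europe-west1",
    plan: "startup-4",
    serviceName: "my-kc-filtered",
    kafkaConnectUserConfig: {
        // Only accept connections from these CIDR blocks (placeholder ranges).
        ipFilters: ["10.20.0.0/16", "192.168.1.0/24"],
    },
});
```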

### KafkaConnectKafkaConnectUserConfigKafkaConnect

ConnectorClientConfigOverridePolicy string

Defines what client configurations can be overridden by the connector. Default is None.

ConsumerAutoOffsetReset string

What to do when there is no initial offset in Kafka or if the current offset does not exist any more on the server. Default is earliest.

ConsumerFetchMaxBytes string

Records are fetched in batches by the consumer, and if the first record batch in the first non-empty partition of the fetch is larger than this value, the record batch will still be returned to ensure that the consumer can make progress. As such, this is not an absolute maximum.

ConsumerIsolationLevel string

Transaction read isolation level. read_uncommitted is the default, but read_committed can be used if consume-exactly-once behavior is desired.

ConsumerMaxPartitionFetchBytes string

Records are fetched in batches by the consumer. If the first record batch in the first non-empty partition of the fetch is larger than this limit, the batch will still be returned to ensure that the consumer can make progress.

ConsumerMaxPollIntervalMs string

The maximum delay in milliseconds between invocations of poll() when using consumer group management (defaults to 300000).

ConsumerMaxPollRecords string

The maximum number of records returned by a single poll.

OffsetFlushIntervalMs string

The interval at which to try committing offsets for tasks (defaults to 60000).

OffsetFlushTimeoutMs string

Maximum number of milliseconds to wait for records to flush and partition offset data to be committed to offset storage before cancelling the process and restoring the offset data to be committed in a future attempt (defaults to 5000).

ProducerMaxRequestSize string

This setting will limit the number of record batches the producer will send in a single request to avoid sending huge requests.

SessionTimeoutMs string

The timeout in milliseconds used to detect failures when using Kafka’s group management facilities (defaults to 10000).

ConnectorClientConfigOverridePolicy string

Defines what client configurations can be overridden by the connector. Default is None.

ConsumerAutoOffsetReset string

What to do when there is no initial offset in Kafka or if the current offset does not exist any more on the server. Default is earliest.

ConsumerFetchMaxBytes string

Records are fetched in batches by the consumer, and if the first record batch in the first non-empty partition of the fetch is larger than this value, the record batch will still be returned to ensure that the consumer can make progress. As such, this is not an absolute maximum.

ConsumerIsolationLevel string

Transaction read isolation level. read_uncommitted is the default, but read_committed can be used if consume-exactly-once behavior is desired.

ConsumerMaxPartitionFetchBytes string

Records are fetched in batches by the consumer. If the first record batch in the first non-empty partition of the fetch is larger than this limit, the batch will still be returned to ensure that the consumer can make progress.

ConsumerMaxPollIntervalMs string

The maximum delay in milliseconds between invocations of poll() when using consumer group management (defaults to 300000).

ConsumerMaxPollRecords string

The maximum number of records returned by a single poll.

OffsetFlushIntervalMs string

The interval at which to try committing offsets for tasks (defaults to 60000).

OffsetFlushTimeoutMs string

Maximum number of milliseconds to wait for records to flush and partition offset data to be committed to offset storage before cancelling the process and restoring the offset data to be committed in a future attempt (defaults to 5000).

ProducerMaxRequestSize string

This setting will limit the number of record batches the producer will send in a single request to avoid sending huge requests.

SessionTimeoutMs string

The timeout in milliseconds used to detect failures when using Kafka’s group management facilities (defaults to 10000).

connectorClientConfigOverridePolicy string

Defines what client configurations can be overridden by the connector. Default is None.

consumerAutoOffsetReset string

What to do when there is no initial offset in Kafka or if the current offset does not exist any more on the server. Default is earliest.

consumerFetchMaxBytes string

Records are fetched in batches by the consumer, and if the first record batch in the first non-empty partition of the fetch is larger than this value, the record batch will still be returned to ensure that the consumer can make progress. As such, this is not an absolute maximum.

consumerIsolationLevel string

Transaction read isolation level. read_uncommitted is the default, but read_committed can be used if consume-exactly-once behavior is desired.

consumerMaxPartitionFetchBytes string

Records are fetched in batches by the consumer. If the first record batch in the first non-empty partition of the fetch is larger than this limit, the batch will still be returned to ensure that the consumer can make progress.

consumerMaxPollIntervalMs string

The maximum delay in milliseconds between invocations of poll() when using consumer group management (defaults to 300000).

consumerMaxPollRecords string

The maximum number of records returned by a single poll.

offsetFlushIntervalMs string

The interval at which to try committing offsets for tasks (defaults to 60000).

offsetFlushTimeoutMs string

Maximum number of milliseconds to wait for records to flush and partition offset data to be committed to offset storage before cancelling the process and restoring the offset data to be committed in a future attempt (defaults to 5000).

producerMaxRequestSize string

This setting will limit the number of record batches the producer will send in a single request to avoid sending huge requests.

sessionTimeoutMs string

The timeout in milliseconds used to detect failures when using Kafka’s group management facilities (defaults to 10000).

connector_client_config_override_policy str

Defines what client configurations can be overridden by the connector. Default is None.

consumer_auto_offset_reset str

What to do when there is no initial offset in Kafka or if the current offset does not exist any more on the server. Default is earliest.

consumer_fetch_max_bytes str

Records are fetched in batches by the consumer, and if the first record batch in the first non-empty partition of the fetch is larger than this value, the record batch will still be returned to ensure that the consumer can make progress. As such, this is not an absolute maximum.

consumer_isolation_level str

Transaction read isolation level. read_uncommitted is the default, but read_committed can be used if consume-exactly-once behavior is desired.

consumer_max_partition_fetch_bytes str

Records are fetched in batches by the consumer. If the first record batch in the first non-empty partition of the fetch is larger than this limit, the batch will still be returned to ensure that the consumer can make progress.

consumer_max_poll_interval_ms str

The maximum delay in milliseconds between invocations of poll() when using consumer group management (defaults to 300000).

consumer_max_poll_records str

The maximum number of records returned by a single poll.

offset_flush_interval_ms str

The interval at which to try committing offsets for tasks (defaults to 60000).

offset_flush_timeout_ms str

Maximum number of milliseconds to wait for records to flush and partition offset data to be committed to offset storage before cancelling the process and restoring the offset data to be committed in a future attempt (defaults to 5000).

producer_max_request_size str

This setting will limit the number of record batches the producer will send in a single request to avoid sending huge requests.

session_timeout_ms str

The timeout in milliseconds used to detect failures when using Kafka’s group management facilities (defaults to 10000).
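
The sketch below shows a few of these options being tuned together in TypeScript. Note that, per the tables above, the numeric options are typed as strings in this SDK version; the values themselves are placeholders.

```typescript
import * as aiven from "@pulumi/aiven";

const kcTuned = new aiven.KafkaConnect("kc-tuned", {
    project: "my-project",            // placeholder project name
    cloudName: "google-europe-west1",
    plan: "startup-4",
    serviceName: "my-kc-tuned",
    kafkaConnectUserConfig: {
        kafkaConnect: {
            consumerIsolationLevel: "read_committed",
            // Numeric options are passed as strings, matching the types above.
            consumerMaxPollRecords: "500",
            offsetFlushIntervalMs: "30000",
            sessionTimeoutMs: "30000",
        },
    },
});
```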

### KafkaConnectKafkaConnectUserConfigPrivateAccess

KafkaConnect string

Allow clients to connect to kafka_connect from private networks, for service nodes that are in a project VPC or another type of private network.

Prometheus string

Allow clients to connect to prometheus from private networks, for service nodes that are in a project VPC or another type of private network.

KafkaConnect string

Allow clients to connect to kafka_connect from private networks, for service nodes that are in a project VPC or another type of private network.

Prometheus string

Allow clients to connect to prometheus from private networks, for service nodes that are in a project VPC or another type of private network.

kafkaConnect string

Allow clients to connect to kafka_connect from private networks, for service nodes that are in a project VPC or another type of private network.

prometheus string

Allow clients to connect to prometheus from private networks, for service nodes that are in a project VPC or another type of private network.

kafka_connect str

Allow clients to connect to kafka_connect from private networks, for service nodes that are in a project VPC or another type of private network.

prometheus str

Allow clients to connect to prometheus from private networks, for service nodes that are in a project VPC or another type of private network.

### KafkaConnectKafkaConnectUserConfigPublicAccess

KafkaConnect string

Allow clients to connect to kafka_connect from the public internet for service nodes that are in a project VPC or another type of private network.

Prometheus string

Allow clients to connect to prometheus from the public internet for service nodes that are in a project VPC or another type of private network.

KafkaConnect string

Allow clients to connect to kafka_connect from the public internet for service nodes that are in a project VPC or another type of private network.

Prometheus string

Allow clients to connect to prometheus from the public internet for service nodes that are in a project VPC or another type of private network.

kafkaConnect string

Allow clients to connect to kafka_connect from the public internet for service nodes that are in a project VPC or another type of private network.

prometheus string

Allow clients to connect to prometheus from the public internet for service nodes that are in a project VPC or another type of private network.

kafka_connect str

Allow clients to connect to kafka_connect from the public internet for service nodes that are in a project VPC or another type of private network.

prometheus str

Allow clients to connect to prometheus from the public internet for service nodes that are in a project VPC or another type of private network.

### KafkaConnectServiceIntegration

## Package Details

Repository
https://github.com/pulumi/pulumi-aiven
License
Apache-2.0
Notes
This Pulumi package is based on the aiven Terraform Provider.