# Kafka Connect Resource
The Kafka Connect resource allows the creation and management of Aiven Kafka Connect services.
## Example Usage
```csharp
using Pulumi;
using Aiven = Pulumi.Aiven;

class MyStack : Stack
{
    public MyStack()
    {
        var kc1 = new Aiven.KafkaConnect("kc1", new Aiven.KafkaConnectArgs
        {
            Project = data.Aiven_project.Pr1.Project,
            CloudName = "google-europe-west1",
            Plan = "startup-4",
            ServiceName = "my-kc1",
            MaintenanceWindowDow = "monday",
            MaintenanceWindowTime = "10:00:00",
            KafkaConnectUserConfig = new Aiven.Inputs.KafkaConnectKafkaConnectUserConfigArgs
            {
                KafkaConnect = new Aiven.Inputs.KafkaConnectKafkaConnectUserConfigKafkaConnectArgs
                {
                    ConsumerIsolationLevel = "read_committed",
                },
                PublicAccess = new Aiven.Inputs.KafkaConnectKafkaConnectUserConfigPublicAccessArgs
                {
                    KafkaConnect = "true",
                },
            },
        });
    }
}
```
```go
package main

import (
    "github.com/pulumi/pulumi-aiven/sdk/v3/go/aiven"
    "github.com/pulumi/pulumi/sdk/v2/go/pulumi"
)

func main() {
    pulumi.Run(func(ctx *pulumi.Context) error {
        _, err := aiven.NewKafkaConnect(ctx, "kc1", &aiven.KafkaConnectArgs{
            Project:               pulumi.Any(data.Aiven_project.Pr1.Project),
            CloudName:             pulumi.String("google-europe-west1"),
            Plan:                  pulumi.String("startup-4"),
            ServiceName:           pulumi.String("my-kc1"),
            MaintenanceWindowDow:  pulumi.String("monday"),
            MaintenanceWindowTime: pulumi.String("10:00:00"),
            KafkaConnectUserConfig: &aiven.KafkaConnectKafkaConnectUserConfigArgs{
                KafkaConnect: &aiven.KafkaConnectKafkaConnectUserConfigKafkaConnectArgs{
                    ConsumerIsolationLevel: pulumi.String("read_committed"),
                },
                PublicAccess: &aiven.KafkaConnectKafkaConnectUserConfigPublicAccessArgs{
                    KafkaConnect: pulumi.String("true"),
                },
            },
        })
        if err != nil {
            return err
        }
        return nil
    })
}
```
```python
import pulumi
import pulumi_aiven as aiven

kc1 = aiven.KafkaConnect("kc1",
    project=data["aiven_project"]["pr1"]["project"],
    cloud_name="google-europe-west1",
    plan="startup-4",
    service_name="my-kc1",
    maintenance_window_dow="monday",
    maintenance_window_time="10:00:00",
    kafka_connect_user_config=aiven.KafkaConnectKafkaConnectUserConfigArgs(
        kafka_connect=aiven.KafkaConnectKafkaConnectUserConfigKafkaConnectArgs(
            consumer_isolation_level="read_committed",
        ),
        public_access=aiven.KafkaConnectKafkaConnectUserConfigPublicAccessArgs(
            kafka_connect=True,
        ),
    ))
```
```typescript
import * as pulumi from "@pulumi/pulumi";
import * as aiven from "@pulumi/aiven";

const kc1 = new aiven.KafkaConnect("kc1", {
    project: data.aiven_project.pr1.project,
    cloudName: "google-europe-west1",
    plan: "startup-4",
    serviceName: "my-kc1",
    maintenanceWindowDow: "monday",
    maintenanceWindowTime: "10:00:00",
    kafkaConnectUserConfig: {
        kafkaConnect: {
            consumerIsolationLevel: "read_committed",
        },
        publicAccess: {
            kafkaConnect: true,
        },
    },
});
```
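The service's connection details become resource outputs once the deployment completes. As a minimal sketch continuing the TypeScript program above, you could expose them as stack outputs:

```typescript
// Expose the Kafka Connect REST endpoint and the service state as stack outputs.
export const kafkaConnectUri = kc1.serviceUri;
export const kafkaConnectState = kc1.state;
```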
## Create a KafkaConnect Resource
```typescript
new KafkaConnect(name: string, args: KafkaConnectArgs, opts?: CustomResourceOptions);
```

```python
def KafkaConnect(resource_name: str,
                 opts: Optional[ResourceOptions] = None,
                 cloud_name: Optional[str] = None,
                 kafka_connect: Optional[KafkaConnectKafkaConnectArgs] = None,
                 kafka_connect_user_config: Optional[KafkaConnectKafkaConnectUserConfigArgs] = None,
                 maintenance_window_dow: Optional[str] = None,
                 maintenance_window_time: Optional[str] = None,
                 plan: Optional[str] = None,
                 project: Optional[str] = None,
                 project_vpc_id: Optional[str] = None,
                 service_integrations: Optional[Sequence[KafkaConnectServiceIntegrationArgs]] = None,
                 service_name: Optional[str] = None,
                 termination_protection: Optional[bool] = None)
```

```go
func NewKafkaConnect(ctx *Context, name string, args KafkaConnectArgs, opts ...ResourceOption) (*KafkaConnect, error)
```

```csharp
public KafkaConnect(string name, KafkaConnectArgs args, CustomResourceOptions? opts = null)
```
- name string
- The unique name of the resource.
- args KafkaConnectArgs
- The arguments to use to populate this resource's properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- opts ResourceOptions
- A bag of options that control this resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args KafkaConnectArgs
- The arguments to use to populate this resource's properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args KafkaConnectArgs
- The arguments to use to populate this resource's properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
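The `opts` bag accepts standard Pulumi resource options. A small sketch, continuing the TypeScript program above, that protects a production service from accidental deletion (the `ignoreChanges` entry is just an illustration):

```typescript
const protectedKc = new aiven.KafkaConnect("protected-kc", {
    project: "my-project",
    cloudName: "google-europe-west1",
    plan: "startup-4",
    serviceName: "my-protected-kc",
}, {
    protect: true,            // refuse to delete this resource via `pulumi destroy`
    ignoreChanges: ["plan"],  // example: ignore out-of-band plan changes
});
```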
## KafkaConnect Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Programming Model docs.
### Inputs
The KafkaConnect resource accepts the following input properties:
- `Project` (string): Identifies the project the service belongs to. To set up a proper dependency between the project and the service, refer to the project as shown in the example above. The project cannot be changed later without destroying and re-creating the service.
- `ServiceName` (string): Specifies the actual name of the service. The name cannot be changed later without destroying and re-creating the service, so the name should be picked based on intended service usage rather than current attributes.
- `CloudName` (string): Defines the cloud provider and region where the service is hosted. This can be changed freely after the service is created, but changing the value triggers a potentially lengthy migration process for the service. The format is the cloud provider name (`aws`, `azure`, `do`, `google`, `upcloud`, etc.), a dash, and the cloud provider specific region name. The available regions are documented in each cloud provider's own support articles.
- `KafkaConnect` (KafkaConnectKafkaConnectArgs): Allow clients to connect to kafka_connect from the public internet for service nodes that are in a project VPC or another type of private network.
- `KafkaConnectUserConfig` (KafkaConnectKafkaConnectUserConfigArgs): Defines Kafka Connect specific additional configuration options; the available options are documented under the `KafkaConnectKafkaConnectUserConfig` supporting type below.
- `MaintenanceWindowDow` (string): Day of week when maintenance operations should be performed. One of monday, tuesday, wednesday, etc.
- `MaintenanceWindowTime` (string): Time of day when maintenance operations should be performed. UTC time in HH:mm:ss format.
- `Plan` (string): Defines what kind of computing resources are allocated for the service. It can be changed after creation, though there are some restrictions when going to a smaller plan: the new plan must have a sufficient amount of disk space to store all current data, and switching to a plan with fewer nodes might not be supported. The basic plan names are `hobbyist`, `startup-x`, `business-x` and `premium-x`, where `x` is (roughly) the amount of memory on each node (other attributes such as the number of CPUs and the amount of disk space also vary, but the naming is based on memory). The exact options can be seen in the Aiven web console's Create Service dialog.
- `ProjectVpcId` (string): Optionally specifies the VPC the service should run in. If the value is not set, the service is not run inside a VPC. When set, the value should be given as a reference (as shown above) to set up dependencies correctly, and the VPC must be in the same cloud and region as the service itself. The project can be freely moved to and from a VPC after creation, but doing so triggers a migration to new servers, so the operation can take a significant amount of time to complete if the service has a lot of data.
- `ServiceIntegrations` (List<KafkaConnectServiceIntegrationArgs>): Service integrations to specify when creating the service. Not applied after initial service creation.
- `TerminationProtection` (bool): Prevents the service from being deleted. It is recommended to set this to `true` for all production services to prevent unintentional service deletion. This does not shield against deleting databases or topics, but for services with backups much of the content can at least be restored from backup in case of accidental deletion.
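When `ProjectVpcId` is set, the guidance above is to pass it as a reference so Pulumi orders the resources correctly. A minimal TypeScript sketch, assuming an `aiven.ProjectVpc` resource in the same cloud and region (names and the CIDR are illustrative):

```typescript
// Assumed: a project VPC in the same cloud/region as the service.
const vpc = new aiven.ProjectVpc("kc-vpc", {
    project: "my-project",
    cloudName: "google-europe-west1",
    networkCidr: "10.1.0.0/24",
});

const kcInVpc = new aiven.KafkaConnect("kc-in-vpc", {
    project: "my-project",
    cloudName: "google-europe-west1",
    plan: "startup-4",
    serviceName: "my-kc-vpc",
    projectVpcId: vpc.id,          // referencing the VPC output sets up the dependency
    terminationProtection: true,   // guard a production service against deletion
});
```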
### Outputs
All input properties are implicitly available as output properties. Additionally, the KafkaConnect resource produces the following output properties:
- `Components` (List<KafkaConnectComponent>): Service component information objects.
- `Id` (string): The provider-assigned unique ID for this managed resource.
- `ServiceHost` (string): Kafka Connect hostname.
- `ServicePassword` (string): Password used for connecting to the Kafka Connect service, if applicable.
- `ServicePort` (int): Kafka Connect port.
- `ServiceType` (string): Aiven internal service type code.
- `ServiceUri` (string): URI for connecting to the Kafka Connect service.
- `ServiceUsername` (string): Username used for connecting to the Kafka Connect service, if applicable.
- `State` (string): Service state.
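Because these values are outputs, compose them with `pulumi.interpolate` or `apply` rather than reading them directly. A small sketch continuing the TypeScript example above (the URL scheme is illustrative only):

```typescript
// Build an address for the Connect REST API from the host and port outputs.
export const connectEndpoint = pulumi.interpolate`https://${kc1.serviceHost}:${kc1.servicePort}`;
```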
## Look up an Existing KafkaConnect Resource
Get an existing KafkaConnect resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
```typescript
public static get(name: string, id: Input<ID>, state?: KafkaConnectState, opts?: CustomResourceOptions): KafkaConnect
```

```python
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        cloud_name: Optional[str] = None,
        components: Optional[Sequence[KafkaConnectComponentArgs]] = None,
        kafka_connect: Optional[KafkaConnectKafkaConnectArgs] = None,
        kafka_connect_user_config: Optional[KafkaConnectKafkaConnectUserConfigArgs] = None,
        maintenance_window_dow: Optional[str] = None,
        maintenance_window_time: Optional[str] = None,
        plan: Optional[str] = None,
        project: Optional[str] = None,
        project_vpc_id: Optional[str] = None,
        service_host: Optional[str] = None,
        service_integrations: Optional[Sequence[KafkaConnectServiceIntegrationArgs]] = None,
        service_name: Optional[str] = None,
        service_password: Optional[str] = None,
        service_port: Optional[int] = None,
        service_type: Optional[str] = None,
        service_uri: Optional[str] = None,
        service_username: Optional[str] = None,
        state: Optional[str] = None,
        termination_protection: Optional[bool] = None) -> KafkaConnect
```

```go
func GetKafkaConnect(ctx *Context, name string, id IDInput, state *KafkaConnectState, opts ...ResourceOption) (*KafkaConnect, error)
```

```csharp
public static KafkaConnect Get(string name, Input<string> id, KafkaConnectState? state, CustomResourceOptions? opts = null)
```
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
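As a sketch, looking up a service that already exists outside this stack (continuing the TypeScript setup above; the `<project>/<service name>` ID format is an assumption for illustration, so check the provider's import documentation for the exact form):

```typescript
// Look up an existing Kafka Connect service by its provider ID.
const existing = aiven.KafkaConnect.get("existing-kc", "my-project/my-kc1");
export const existingUri = existing.serviceUri;
```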
The following state arguments are supported:
- `CloudName` (string): Defines the cloud provider and region where the service is hosted. This can be changed freely after the service is created, but changing the value triggers a potentially lengthy migration process for the service. The format is the cloud provider name (`aws`, `azure`, `do`, `google`, `upcloud`, etc.), a dash, and the cloud provider specific region name. The available regions are documented in each cloud provider's own support articles.
- `Components` (List<KafkaConnectComponentArgs>): Service component information objects.
- `KafkaConnect` (KafkaConnectKafkaConnectArgs): Allow clients to connect to kafka_connect from the public internet for service nodes that are in a project VPC or another type of private network.
- `KafkaConnectUserConfig` (KafkaConnectKafkaConnectUserConfigArgs): Defines Kafka Connect specific additional configuration options; the available options are documented under the `KafkaConnectKafkaConnectUserConfig` supporting type below.
- `MaintenanceWindowDow` (string): Day of week when maintenance operations should be performed. One of monday, tuesday, wednesday, etc.
- `MaintenanceWindowTime` (string): Time of day when maintenance operations should be performed. UTC time in HH:mm:ss format.
- `Plan` (string): Defines what kind of computing resources are allocated for the service. It can be changed after creation, though there are some restrictions when going to a smaller plan: the new plan must have a sufficient amount of disk space to store all current data, and switching to a plan with fewer nodes might not be supported. The basic plan names are `hobbyist`, `startup-x`, `business-x` and `premium-x`, where `x` is (roughly) the amount of memory on each node (other attributes such as the number of CPUs and the amount of disk space also vary, but the naming is based on memory). The exact options can be seen in the Aiven web console's Create Service dialog.
- `Project` (string): Identifies the project the service belongs to. To set up a proper dependency between the project and the service, refer to the project as shown in the example above. The project cannot be changed later without destroying and re-creating the service.
- `ProjectVpcId` (string): Optionally specifies the VPC the service should run in. If the value is not set, the service is not run inside a VPC. When set, the value should be given as a reference (as shown above) to set up dependencies correctly, and the VPC must be in the same cloud and region as the service itself. The project can be freely moved to and from a VPC after creation, but doing so triggers a migration to new servers, so the operation can take a significant amount of time to complete if the service has a lot of data.
- `ServiceHost` (string): Kafka Connect hostname.
- `ServiceIntegrations` (List<KafkaConnectServiceIntegrationArgs>): Service integrations to specify when creating the service. Not applied after initial service creation.
- `ServiceName` (string): Specifies the actual name of the service. The name cannot be changed later without destroying and re-creating the service, so the name should be picked based on intended service usage rather than current attributes.
- `ServicePassword` (string): Password used for connecting to the Kafka Connect service, if applicable.
- `ServicePort` (int): Kafka Connect port.
- `ServiceType` (string): Aiven internal service type code.
- `ServiceUri` (string): URI for connecting to the Kafka Connect service.
- `ServiceUsername` (string): Username used for connecting to the Kafka Connect service, if applicable.
- `State` (string): Service state.
- `TerminationProtection` (bool): Prevents the service from being deleted. It is recommended to set this to `true` for all production services to prevent unintentional service deletion. This does not shield against deleting databases or topics, but for services with backups much of the content can at least be restored from backup in case of accidental deletion.
## Supporting Types

### KafkaConnectComponent

### KafkaConnectKafkaConnectUserConfig
- `IpFilters` (List<string>): Allow incoming connections from CIDR address blocks, e.g. `10.20.0.0/16`.
- `KafkaConnect` (KafkaConnectKafkaConnectUserConfigKafkaConnectArgs): Kafka Connect configuration values; the available options are documented under the `KafkaConnectKafkaConnectUserConfigKafkaConnect` supporting type below.
- `PrivateAccess` (KafkaConnectKafkaConnectUserConfigPrivateAccessArgs): Allow access to selected service ports from private networks.
- `PublicAccess` (KafkaConnectKafkaConnectUserConfigPublicAccessArgs): Allow access to selected service ports from the public internet.
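For example, restricting access to a CIDR range while exposing the Connect API publicly might look like the following TypeScript sketch (values are illustrative; the access flags are quoted as strings because the listings above give them string types):

```typescript
const kcFiltered = new aiven.KafkaConnect("kc-filtered", {
    project: "my-project",
    cloudName: "google-europe-west1",
    plan: "startup-4",
    serviceName: "my-kc-filtered",
    kafkaConnectUserConfig: {
        ipFilters: ["10.20.0.0/16"],   // only this CIDR block may connect
        publicAccess: {
            kafkaConnect: "true",      // expose the Connect API on the public internet
            prometheus: "false",       // keep metrics private
        },
    },
});
```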
### KafkaConnectKafkaConnectUserConfigKafkaConnect
- `ConnectorClientConfigOverridePolicy` (string): Defines what client configurations can be overridden by the connector. Default is None.
- `ConsumerAutoOffsetReset` (string): What to do when there is no initial offset in Kafka or if the current offset does not exist any more on the server. Default is earliest.
- `ConsumerFetchMaxBytes` (string): Records are fetched in batches by the consumer, and if the first record batch in the first non-empty partition of the fetch is larger than this value, the record batch will still be returned to ensure that the consumer can make progress. As such, this is not an absolute maximum.
- `ConsumerIsolationLevel` (string): Transaction read isolation level. read_uncommitted is the default, but read_committed can be used if consume-exactly-once behavior is desired.
- `ConsumerMaxPartitionFetchBytes` (string): Records are fetched in batches by the consumer. If the first record batch in the first non-empty partition of the fetch is larger than this limit, the batch will still be returned to ensure that the consumer can make progress.
- `ConsumerMaxPollIntervalMs` (string): The maximum delay in milliseconds between invocations of poll() when using consumer group management (defaults to 300000).
- `ConsumerMaxPollRecords` (string): The maximum number of records returned by a single poll.
- `OffsetFlushIntervalMs` (string): The interval at which to try committing offsets for tasks (defaults to 60000).
- `OffsetFlushTimeoutMs` (string): Maximum number of milliseconds to wait for records to flush and partition offset data to be committed to offset storage before cancelling the process and restoring the offset data to be committed in a future attempt (defaults to 5000).
- `ProducerMaxRequestSize` (string): This setting limits the number of record batches the producer will send in a single request, to avoid sending huge requests.
- `SessionTimeoutMs` (string): The timeout in milliseconds used to detect failures when using Kafka's group management facilities (defaults to 10000).
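A sketch of tuning a few of these options in TypeScript (values are illustrative and passed as strings, matching the types listed above):

```typescript
const kcTuned = new aiven.KafkaConnect("kc-tuned", {
    project: "my-project",
    cloudName: "google-europe-west1",
    plan: "startup-4",
    serviceName: "my-kc-tuned",
    kafkaConnectUserConfig: {
        kafkaConnect: {
            consumerIsolationLevel: "read_committed",    // consume-exactly-once semantics
            consumerMaxPollRecords: "500",               // cap records per poll()
            offsetFlushIntervalMs: "30000",              // commit task offsets twice a minute
            connectorClientConfigOverridePolicy: "All",  // let connectors override client configs
        },
    },
});
```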
### KafkaConnectKafkaConnectUserConfigPrivateAccess
- `KafkaConnect` (string): Allow clients to connect to kafka_connect from the public internet for service nodes that are in a project VPC or another type of private network.
- `Prometheus` (string): Allow clients to connect to prometheus from the public internet for service nodes that are in a project VPC or another type of private network.
### KafkaConnectKafkaConnectUserConfigPublicAccess
- `KafkaConnect` (string): Allow clients to connect to kafka_connect from the public internet for service nodes that are in a project VPC or another type of private network.
- `Prometheus` (string): Allow clients to connect to prometheus from the public internet for service nodes that are in a project VPC or another type of private network.
### KafkaConnectServiceIntegration
- `IntegrationType` (string)
- `SourceServiceName` (string)
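As a sketch, wiring the service to a source service at creation time in TypeScript (the `integrationType` value here is an assumption for illustration; consult the Aiven documentation for the integration types your services support):

```typescript
const kcIntegrated = new aiven.KafkaConnect("kc-integrated", {
    project: "my-project",
    cloudName: "google-europe-west1",
    plan: "startup-4",
    serviceName: "my-kc-integrated",
    serviceIntegrations: [{
        integrationType: "kafka_connect",  // assumed integration type
        sourceServiceName: "my-kafka",     // existing Kafka service in the same project
    }],
});
```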
## Package Details
- Repository: https://github.com/pulumi/pulumi-aiven
- License: Apache-2.0
- Notes: This Pulumi package is based on the `aiven` Terraform Provider.