aiven.ServiceIntegration

Aiven v6.7.2 published on Tuesday, Oct 31, 2023 by Pulumi

    The Service Integration resource allows the creation and management of Aiven Service Integrations.

    Note: Service integrations are not supported for services running on the hobbyist plan.

    A Service Integration defines an integration between two Aiven services, or between an Aiven service and an external integration endpoint. For example, an integration can send metrics from a Kafka service to an InfluxDB service, pull metrics from an InfluxDB service into a Grafana service for dashboards, or ship logs from any service to Elasticsearch.

    Example Usage

    C#

    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using Aiven = Pulumi.Aiven;
    
    return await Deployment.RunAsync(() => 
    {
        var myIntegrationMetrics = new Aiven.ServiceIntegration("myIntegrationMetrics", new()
        {
            Project = aiven_project.Myproject.Project,
            IntegrationType = "metrics",
            SourceServiceName = aiven_kafka.Kfk1.Service_name,
            DestinationServiceName = aiven_m3db.M3db.Service_name,
        });
    
    });
    
    Go

    package main
    
    import (
    	"github.com/pulumi/pulumi-aiven/sdk/v6/go/aiven"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := aiven.NewServiceIntegration(ctx, "myIntegrationMetrics", &aiven.ServiceIntegrationArgs{
    			Project:                pulumi.Any(aiven_project.Myproject.Project),
    			IntegrationType:        pulumi.String("metrics"),
    			SourceServiceName:      pulumi.Any(aiven_kafka.Kfk1.Service_name),
    			DestinationServiceName: pulumi.Any(aiven_m3db.M3db.Service_name),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    Java

    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.aiven.ServiceIntegration;
    import com.pulumi.aiven.ServiceIntegrationArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var myIntegrationMetrics = new ServiceIntegration("myIntegrationMetrics", ServiceIntegrationArgs.builder()        
                .project(aiven_project.myproject().project())
                .integrationType("metrics")
                .sourceServiceName(aiven_kafka.kfk1().service_name())
                .destinationServiceName(aiven_m3db.m3db().service_name())
                .build());
    
        }
    }
    
    Python

    import pulumi
    import pulumi_aiven as aiven
    
    my_integration_metrics = aiven.ServiceIntegration("myIntegrationMetrics",
        project=aiven_project["myproject"]["project"],
        integration_type="metrics",
        source_service_name=aiven_kafka["kfk1"]["service_name"],
        destination_service_name=aiven_m3db["m3db"]["service_name"])
    
    TypeScript

    import * as pulumi from "@pulumi/pulumi";
    import * as aiven from "@pulumi/aiven";
    
    const myIntegrationMetrics = new aiven.ServiceIntegration("myIntegrationMetrics", {
        project: aiven_project.myproject.project,
        integrationType: "metrics",
        sourceServiceName: aiven_kafka.kfk1.service_name,
        destinationServiceName: aiven_m3db.m3db.service_name,
    });
    
    YAML

    resources:
      myIntegrationMetrics:
        type: aiven:ServiceIntegration
        properties:
          project: ${aiven_project.myproject.project}
          integrationType: metrics
          sourceServiceName: ${aiven_kafka.kfk1.service_name}
          destinationServiceName: ${aiven_m3db.m3db.service_name}
    

    Create ServiceIntegration Resource

    new ServiceIntegration(name: string, args: ServiceIntegrationArgs, opts?: CustomResourceOptions);
    @overload
    def ServiceIntegration(resource_name: str,
                           opts: Optional[ResourceOptions] = None,
                           clickhouse_kafka_user_config: Optional[ServiceIntegrationClickhouseKafkaUserConfigArgs] = None,
                           clickhouse_postgresql_user_config: Optional[ServiceIntegrationClickhousePostgresqlUserConfigArgs] = None,
                           datadog_user_config: Optional[ServiceIntegrationDatadogUserConfigArgs] = None,
                           destination_endpoint_id: Optional[str] = None,
                           destination_service_name: Optional[str] = None,
                           external_aws_cloudwatch_metrics_user_config: Optional[ServiceIntegrationExternalAwsCloudwatchMetricsUserConfigArgs] = None,
                           integration_type: Optional[str] = None,
                           kafka_connect_user_config: Optional[ServiceIntegrationKafkaConnectUserConfigArgs] = None,
                           kafka_logs_user_config: Optional[ServiceIntegrationKafkaLogsUserConfigArgs] = None,
                           kafka_mirrormaker_user_config: Optional[ServiceIntegrationKafkaMirrormakerUserConfigArgs] = None,
                           logs_user_config: Optional[ServiceIntegrationLogsUserConfigArgs] = None,
                           metrics_user_config: Optional[ServiceIntegrationMetricsUserConfigArgs] = None,
                           project: Optional[str] = None,
                           source_endpoint_id: Optional[str] = None,
                           source_service_name: Optional[str] = None)
    @overload
    def ServiceIntegration(resource_name: str,
                           args: ServiceIntegrationArgs,
                           opts: Optional[ResourceOptions] = None)
    func NewServiceIntegration(ctx *Context, name string, args ServiceIntegrationArgs, opts ...ResourceOption) (*ServiceIntegration, error)
    public ServiceIntegration(string name, ServiceIntegrationArgs args, CustomResourceOptions? opts = null)
    public ServiceIntegration(String name, ServiceIntegrationArgs args)
    public ServiceIntegration(String name, ServiceIntegrationArgs args, CustomResourceOptions options)
    
    type: aiven:ServiceIntegration
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    
    name string
    The unique name of the resource.
    args ServiceIntegrationArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args ServiceIntegrationArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args ServiceIntegrationArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args ServiceIntegrationArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args ServiceIntegrationArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    ServiceIntegration Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    The ServiceIntegration resource accepts the following input properties:

    IntegrationType string

    Type of the service integration. Possible values: alertmanager, cassandra_cross_service_cluster, clickhouse_kafka, clickhouse_postgresql, dashboard, datadog, datasource, external_aws_cloudwatch_logs, external_aws_cloudwatch_metrics, external_elasticsearch_logs, external_google_cloud_logging, external_opensearch_logs, flink, internal_connectivity, jolokia, kafka_connect, kafka_logs, kafka_mirrormaker, logs, m3aggregator, m3coordinator, metrics, opensearch_cross_cluster_replication, opensearch_cross_cluster_search, prometheus, read_replica, rsyslog, schema_registry_proxy

    Project string

    Project the integration belongs to

    ClickhouseKafkaUserConfig ServiceIntegrationClickhouseKafkaUserConfig

    ClickhouseKafka user configurable settings

    ClickhousePostgresqlUserConfig ServiceIntegrationClickhousePostgresqlUserConfig

    ClickhousePostgresql user configurable settings

    DatadogUserConfig ServiceIntegrationDatadogUserConfig

    Datadog user configurable settings

    DestinationEndpointId string

    Destination endpoint for the integration (if any)

    DestinationServiceName string

    Destination service for the integration (if any)

    ExternalAwsCloudwatchMetricsUserConfig ServiceIntegrationExternalAwsCloudwatchMetricsUserConfig

    ExternalAwsCloudwatchMetrics user configurable settings

    KafkaConnectUserConfig ServiceIntegrationKafkaConnectUserConfig

    KafkaConnect user configurable settings

    KafkaLogsUserConfig ServiceIntegrationKafkaLogsUserConfig

    KafkaLogs user configurable settings

    KafkaMirrormakerUserConfig ServiceIntegrationKafkaMirrormakerUserConfig

    KafkaMirrormaker user configurable settings

    LogsUserConfig ServiceIntegrationLogsUserConfig

    Logs user configurable settings

    MetricsUserConfig ServiceIntegrationMetricsUserConfig

    Metrics user configurable settings

    SourceEndpointId string

    Source endpoint for the integration (if any)

    SourceServiceName string

    Source service for the integration (if any)

    IntegrationType string

    Type of the service integration. Possible values: alertmanager, cassandra_cross_service_cluster, clickhouse_kafka, clickhouse_postgresql, dashboard, datadog, datasource, external_aws_cloudwatch_logs, external_aws_cloudwatch_metrics, external_elasticsearch_logs, external_google_cloud_logging, external_opensearch_logs, flink, internal_connectivity, jolokia, kafka_connect, kafka_logs, kafka_mirrormaker, logs, m3aggregator, m3coordinator, metrics, opensearch_cross_cluster_replication, opensearch_cross_cluster_search, prometheus, read_replica, rsyslog, schema_registry_proxy

    Project string

    Project the integration belongs to

    ClickhouseKafkaUserConfig ServiceIntegrationClickhouseKafkaUserConfigArgs

    ClickhouseKafka user configurable settings

    ClickhousePostgresqlUserConfig ServiceIntegrationClickhousePostgresqlUserConfigArgs

    ClickhousePostgresql user configurable settings

    DatadogUserConfig ServiceIntegrationDatadogUserConfigArgs

    Datadog user configurable settings

    DestinationEndpointId string

    Destination endpoint for the integration (if any)

    DestinationServiceName string

    Destination service for the integration (if any)

    ExternalAwsCloudwatchMetricsUserConfig ServiceIntegrationExternalAwsCloudwatchMetricsUserConfigArgs

    ExternalAwsCloudwatchMetrics user configurable settings

    KafkaConnectUserConfig ServiceIntegrationKafkaConnectUserConfigArgs

    KafkaConnect user configurable settings

    KafkaLogsUserConfig ServiceIntegrationKafkaLogsUserConfigArgs

    KafkaLogs user configurable settings

    KafkaMirrormakerUserConfig ServiceIntegrationKafkaMirrormakerUserConfigArgs

    KafkaMirrormaker user configurable settings

    LogsUserConfig ServiceIntegrationLogsUserConfigArgs

    Logs user configurable settings

    MetricsUserConfig ServiceIntegrationMetricsUserConfigArgs

    Metrics user configurable settings

    SourceEndpointId string

    Source endpoint for the integration (if any)

    SourceServiceName string

    Source service for the integration (if any)

    integrationType String

    Type of the service integration. Possible values: alertmanager, cassandra_cross_service_cluster, clickhouse_kafka, clickhouse_postgresql, dashboard, datadog, datasource, external_aws_cloudwatch_logs, external_aws_cloudwatch_metrics, external_elasticsearch_logs, external_google_cloud_logging, external_opensearch_logs, flink, internal_connectivity, jolokia, kafka_connect, kafka_logs, kafka_mirrormaker, logs, m3aggregator, m3coordinator, metrics, opensearch_cross_cluster_replication, opensearch_cross_cluster_search, prometheus, read_replica, rsyslog, schema_registry_proxy

    project String

    Project the integration belongs to

    clickhouseKafkaUserConfig ServiceIntegrationClickhouseKafkaUserConfig

    ClickhouseKafka user configurable settings

    clickhousePostgresqlUserConfig ServiceIntegrationClickhousePostgresqlUserConfig

    ClickhousePostgresql user configurable settings

    datadogUserConfig ServiceIntegrationDatadogUserConfig

    Datadog user configurable settings

    destinationEndpointId String

    Destination endpoint for the integration (if any)

    destinationServiceName String

    Destination service for the integration (if any)

    externalAwsCloudwatchMetricsUserConfig ServiceIntegrationExternalAwsCloudwatchMetricsUserConfig

    ExternalAwsCloudwatchMetrics user configurable settings

    kafkaConnectUserConfig ServiceIntegrationKafkaConnectUserConfig

    KafkaConnect user configurable settings

    kafkaLogsUserConfig ServiceIntegrationKafkaLogsUserConfig

    KafkaLogs user configurable settings

    kafkaMirrormakerUserConfig ServiceIntegrationKafkaMirrormakerUserConfig

    KafkaMirrormaker user configurable settings

    logsUserConfig ServiceIntegrationLogsUserConfig

    Logs user configurable settings

    metricsUserConfig ServiceIntegrationMetricsUserConfig

    Metrics user configurable settings

    sourceEndpointId String

    Source endpoint for the integration (if any)

    sourceServiceName String

    Source service for the integration (if any)

    integrationType string

    Type of the service integration. Possible values: alertmanager, cassandra_cross_service_cluster, clickhouse_kafka, clickhouse_postgresql, dashboard, datadog, datasource, external_aws_cloudwatch_logs, external_aws_cloudwatch_metrics, external_elasticsearch_logs, external_google_cloud_logging, external_opensearch_logs, flink, internal_connectivity, jolokia, kafka_connect, kafka_logs, kafka_mirrormaker, logs, m3aggregator, m3coordinator, metrics, opensearch_cross_cluster_replication, opensearch_cross_cluster_search, prometheus, read_replica, rsyslog, schema_registry_proxy

    project string

    Project the integration belongs to

    clickhouseKafkaUserConfig ServiceIntegrationClickhouseKafkaUserConfig

    ClickhouseKafka user configurable settings

    clickhousePostgresqlUserConfig ServiceIntegrationClickhousePostgresqlUserConfig

    ClickhousePostgresql user configurable settings

    datadogUserConfig ServiceIntegrationDatadogUserConfig

    Datadog user configurable settings

    destinationEndpointId string

    Destination endpoint for the integration (if any)

    destinationServiceName string

    Destination service for the integration (if any)

    externalAwsCloudwatchMetricsUserConfig ServiceIntegrationExternalAwsCloudwatchMetricsUserConfig

    ExternalAwsCloudwatchMetrics user configurable settings

    kafkaConnectUserConfig ServiceIntegrationKafkaConnectUserConfig

    KafkaConnect user configurable settings

    kafkaLogsUserConfig ServiceIntegrationKafkaLogsUserConfig

    KafkaLogs user configurable settings

    kafkaMirrormakerUserConfig ServiceIntegrationKafkaMirrormakerUserConfig

    KafkaMirrormaker user configurable settings

    logsUserConfig ServiceIntegrationLogsUserConfig

    Logs user configurable settings

    metricsUserConfig ServiceIntegrationMetricsUserConfig

    Metrics user configurable settings

    sourceEndpointId string

    Source endpoint for the integration (if any)

    sourceServiceName string

    Source service for the integration (if any)

    integration_type str

    Type of the service integration. Possible values: alertmanager, cassandra_cross_service_cluster, clickhouse_kafka, clickhouse_postgresql, dashboard, datadog, datasource, external_aws_cloudwatch_logs, external_aws_cloudwatch_metrics, external_elasticsearch_logs, external_google_cloud_logging, external_opensearch_logs, flink, internal_connectivity, jolokia, kafka_connect, kafka_logs, kafka_mirrormaker, logs, m3aggregator, m3coordinator, metrics, opensearch_cross_cluster_replication, opensearch_cross_cluster_search, prometheus, read_replica, rsyslog, schema_registry_proxy

    project str

    Project the integration belongs to

    clickhouse_kafka_user_config ServiceIntegrationClickhouseKafkaUserConfigArgs

    ClickhouseKafka user configurable settings

    clickhouse_postgresql_user_config ServiceIntegrationClickhousePostgresqlUserConfigArgs

    ClickhousePostgresql user configurable settings

    datadog_user_config ServiceIntegrationDatadogUserConfigArgs

    Datadog user configurable settings

    destination_endpoint_id str

    Destination endpoint for the integration (if any)

    destination_service_name str

    Destination service for the integration (if any)

    external_aws_cloudwatch_metrics_user_config ServiceIntegrationExternalAwsCloudwatchMetricsUserConfigArgs

    ExternalAwsCloudwatchMetrics user configurable settings

    kafka_connect_user_config ServiceIntegrationKafkaConnectUserConfigArgs

    KafkaConnect user configurable settings

    kafka_logs_user_config ServiceIntegrationKafkaLogsUserConfigArgs

    KafkaLogs user configurable settings

    kafka_mirrormaker_user_config ServiceIntegrationKafkaMirrormakerUserConfigArgs

    KafkaMirrormaker user configurable settings

    logs_user_config ServiceIntegrationLogsUserConfigArgs

    Logs user configurable settings

    metrics_user_config ServiceIntegrationMetricsUserConfigArgs

    Metrics user configurable settings

    source_endpoint_id str

    Source endpoint for the integration (if any)

    source_service_name str

    Source service for the integration (if any)

    integrationType String

    Type of the service integration. Possible values: alertmanager, cassandra_cross_service_cluster, clickhouse_kafka, clickhouse_postgresql, dashboard, datadog, datasource, external_aws_cloudwatch_logs, external_aws_cloudwatch_metrics, external_elasticsearch_logs, external_google_cloud_logging, external_opensearch_logs, flink, internal_connectivity, jolokia, kafka_connect, kafka_logs, kafka_mirrormaker, logs, m3aggregator, m3coordinator, metrics, opensearch_cross_cluster_replication, opensearch_cross_cluster_search, prometheus, read_replica, rsyslog, schema_registry_proxy

    project String

    Project the integration belongs to

    clickhouseKafkaUserConfig Property Map

    ClickhouseKafka user configurable settings

    clickhousePostgresqlUserConfig Property Map

    ClickhousePostgresql user configurable settings

    datadogUserConfig Property Map

    Datadog user configurable settings

    destinationEndpointId String

    Destination endpoint for the integration (if any)

    destinationServiceName String

    Destination service for the integration (if any)

    externalAwsCloudwatchMetricsUserConfig Property Map

    ExternalAwsCloudwatchMetrics user configurable settings

    kafkaConnectUserConfig Property Map

    KafkaConnect user configurable settings

    kafkaLogsUserConfig Property Map

    KafkaLogs user configurable settings

    kafkaMirrormakerUserConfig Property Map

    KafkaMirrormaker user configurable settings

    logsUserConfig Property Map

    Logs user configurable settings

    metricsUserConfig Property Map

    Metrics user configurable settings

    sourceEndpointId String

    Source endpoint for the integration (if any)

    sourceServiceName String

    Source service for the integration (if any)

    Outputs

    All input properties are implicitly available as output properties. Additionally, the ServiceIntegration resource produces the following output properties:

    Id string

    The provider-assigned unique ID for this managed resource.

    IntegrationId string

    Service Integration Id at aiven

    Id string

    The provider-assigned unique ID for this managed resource.

    IntegrationId string

    Service Integration Id at aiven

    id String

    The provider-assigned unique ID for this managed resource.

    integrationId String

    Service Integration Id at aiven

    id string

    The provider-assigned unique ID for this managed resource.

    integrationId string

    Service Integration Id at aiven

    id str

    The provider-assigned unique ID for this managed resource.

    integration_id str

    Service Integration Id at aiven

    id String

    The provider-assigned unique ID for this managed resource.

    integrationId String

    Service Integration Id at aiven
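
    The integration ID can be exported as a stack output once the resource is created. A minimal Python sketch (the project and service names below are placeholders, not values from the example above):

    import pulumi
    import pulumi_aiven as aiven

    # Placeholder project and service names; substitute your own.
    my_integration_metrics = aiven.ServiceIntegration("myIntegrationMetrics",
        project="my-project",
        integration_type="metrics",
        source_service_name="my-kafka",
        destination_service_name="my-m3db")

    # The provider-assigned integration ID is only known after creation.
    pulumi.export("integration_id", my_integration_metrics.integration_id)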

    Look up Existing ServiceIntegration Resource

    Get an existing ServiceIntegration resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: ServiceIntegrationState, opts?: CustomResourceOptions): ServiceIntegration
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            clickhouse_kafka_user_config: Optional[ServiceIntegrationClickhouseKafkaUserConfigArgs] = None,
            clickhouse_postgresql_user_config: Optional[ServiceIntegrationClickhousePostgresqlUserConfigArgs] = None,
            datadog_user_config: Optional[ServiceIntegrationDatadogUserConfigArgs] = None,
            destination_endpoint_id: Optional[str] = None,
            destination_service_name: Optional[str] = None,
            external_aws_cloudwatch_metrics_user_config: Optional[ServiceIntegrationExternalAwsCloudwatchMetricsUserConfigArgs] = None,
            integration_id: Optional[str] = None,
            integration_type: Optional[str] = None,
            kafka_connect_user_config: Optional[ServiceIntegrationKafkaConnectUserConfigArgs] = None,
            kafka_logs_user_config: Optional[ServiceIntegrationKafkaLogsUserConfigArgs] = None,
            kafka_mirrormaker_user_config: Optional[ServiceIntegrationKafkaMirrormakerUserConfigArgs] = None,
            logs_user_config: Optional[ServiceIntegrationLogsUserConfigArgs] = None,
            metrics_user_config: Optional[ServiceIntegrationMetricsUserConfigArgs] = None,
            project: Optional[str] = None,
            source_endpoint_id: Optional[str] = None,
            source_service_name: Optional[str] = None) -> ServiceIntegration
    func GetServiceIntegration(ctx *Context, name string, id IDInput, state *ServiceIntegrationState, opts ...ResourceOption) (*ServiceIntegration, error)
    public static ServiceIntegration Get(string name, Input<string> id, ServiceIntegrationState? state, CustomResourceOptions? opts = null)
    public static ServiceIntegration get(String name, Output<String> id, ServiceIntegrationState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
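
    As an illustration, a minimal Python sketch of looking up an existing integration; the ID string is a placeholder for the value reported by your stack or the Aiven console:

    import pulumi
    import pulumi_aiven as aiven

    # Placeholder resource ID; use the real ID of the integration to look up.
    existing = aiven.ServiceIntegration.get(
        "existingIntegration",
        id="my-project/00000000-0000-0000-0000-000000000000")

    # State properties such as integration_type are readable on the returned resource.
    pulumi.export("existing_integration_type", existing.integration_type)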
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    ClickhouseKafkaUserConfig ServiceIntegrationClickhouseKafkaUserConfig

    ClickhouseKafka user configurable settings

    ClickhousePostgresqlUserConfig ServiceIntegrationClickhousePostgresqlUserConfig

    ClickhousePostgresql user configurable settings

    DatadogUserConfig ServiceIntegrationDatadogUserConfig

    Datadog user configurable settings

    DestinationEndpointId string

    Destination endpoint for the integration (if any)

    DestinationServiceName string

    Destination service for the integration (if any)

    ExternalAwsCloudwatchMetricsUserConfig ServiceIntegrationExternalAwsCloudwatchMetricsUserConfig

    ExternalAwsCloudwatchMetrics user configurable settings

    IntegrationId string

    Service Integration Id at aiven

    IntegrationType string

    Type of the service integration. Possible values: alertmanager, cassandra_cross_service_cluster, clickhouse_kafka, clickhouse_postgresql, dashboard, datadog, datasource, external_aws_cloudwatch_logs, external_aws_cloudwatch_metrics, external_elasticsearch_logs, external_google_cloud_logging, external_opensearch_logs, flink, internal_connectivity, jolokia, kafka_connect, kafka_logs, kafka_mirrormaker, logs, m3aggregator, m3coordinator, metrics, opensearch_cross_cluster_replication, opensearch_cross_cluster_search, prometheus, read_replica, rsyslog, schema_registry_proxy

    KafkaConnectUserConfig ServiceIntegrationKafkaConnectUserConfig

    KafkaConnect user configurable settings

    KafkaLogsUserConfig ServiceIntegrationKafkaLogsUserConfig

    KafkaLogs user configurable settings

    KafkaMirrormakerUserConfig ServiceIntegrationKafkaMirrormakerUserConfig

    KafkaMirrormaker user configurable settings

    LogsUserConfig ServiceIntegrationLogsUserConfig

    Logs user configurable settings

    MetricsUserConfig ServiceIntegrationMetricsUserConfig

    Metrics user configurable settings

    Project string

    Project the integration belongs to

    SourceEndpointId string

    Source endpoint for the integration (if any)

    SourceServiceName string

    Source service for the integration (if any)

    ClickhouseKafkaUserConfig ServiceIntegrationClickhouseKafkaUserConfigArgs

    ClickhouseKafka user configurable settings

    ClickhousePostgresqlUserConfig ServiceIntegrationClickhousePostgresqlUserConfigArgs

    ClickhousePostgresql user configurable settings

    DatadogUserConfig ServiceIntegrationDatadogUserConfigArgs

    Datadog user configurable settings

    DestinationEndpointId string

    Destination endpoint for the integration (if any)

    DestinationServiceName string

    Destination service for the integration (if any)

    ExternalAwsCloudwatchMetricsUserConfig ServiceIntegrationExternalAwsCloudwatchMetricsUserConfigArgs

    ExternalAwsCloudwatchMetrics user configurable settings

    IntegrationId string

    Service Integration Id at aiven

    IntegrationType string

    Type of the service integration. Possible values: alertmanager, cassandra_cross_service_cluster, clickhouse_kafka, clickhouse_postgresql, dashboard, datadog, datasource, external_aws_cloudwatch_logs, external_aws_cloudwatch_metrics, external_elasticsearch_logs, external_google_cloud_logging, external_opensearch_logs, flink, internal_connectivity, jolokia, kafka_connect, kafka_logs, kafka_mirrormaker, logs, m3aggregator, m3coordinator, metrics, opensearch_cross_cluster_replication, opensearch_cross_cluster_search, prometheus, read_replica, rsyslog, schema_registry_proxy

    KafkaConnectUserConfig ServiceIntegrationKafkaConnectUserConfigArgs

    KafkaConnect user configurable settings

    KafkaLogsUserConfig ServiceIntegrationKafkaLogsUserConfigArgs

    KafkaLogs user configurable settings

    KafkaMirrormakerUserConfig ServiceIntegrationKafkaMirrormakerUserConfigArgs

    KafkaMirrormaker user configurable settings

    LogsUserConfig ServiceIntegrationLogsUserConfigArgs

    Logs user configurable settings

    MetricsUserConfig ServiceIntegrationMetricsUserConfigArgs

    Metrics user configurable settings

    Project string

    Project the integration belongs to

    SourceEndpointId string

    Source endpoint for the integration (if any)

    SourceServiceName string

    Source service for the integration (if any)

    clickhouseKafkaUserConfig ServiceIntegrationClickhouseKafkaUserConfig

    ClickhouseKafka user configurable settings

    clickhousePostgresqlUserConfig ServiceIntegrationClickhousePostgresqlUserConfig

    ClickhousePostgresql user configurable settings

    datadogUserConfig ServiceIntegrationDatadogUserConfig

    Datadog user configurable settings

    destinationEndpointId String

    Destination endpoint for the integration (if any)

    destinationServiceName String

    Destination service for the integration (if any)

    externalAwsCloudwatchMetricsUserConfig ServiceIntegrationExternalAwsCloudwatchMetricsUserConfig

    ExternalAwsCloudwatchMetrics user configurable settings

    integrationId String

    Service Integration Id at aiven

    integrationType String

    Type of the service integration. Possible values: alertmanager, cassandra_cross_service_cluster, clickhouse_kafka, clickhouse_postgresql, dashboard, datadog, datasource, external_aws_cloudwatch_logs, external_aws_cloudwatch_metrics, external_elasticsearch_logs, external_google_cloud_logging, external_opensearch_logs, flink, internal_connectivity, jolokia, kafka_connect, kafka_logs, kafka_mirrormaker, logs, m3aggregator, m3coordinator, metrics, opensearch_cross_cluster_replication, opensearch_cross_cluster_search, prometheus, read_replica, rsyslog, schema_registry_proxy

    kafkaConnectUserConfig ServiceIntegrationKafkaConnectUserConfig

    KafkaConnect user configurable settings

    kafkaLogsUserConfig ServiceIntegrationKafkaLogsUserConfig

    KafkaLogs user configurable settings

    kafkaMirrormakerUserConfig ServiceIntegrationKafkaMirrormakerUserConfig

    KafkaMirrormaker user configurable settings

    logsUserConfig ServiceIntegrationLogsUserConfig

    Logs user configurable settings

    metricsUserConfig ServiceIntegrationMetricsUserConfig

    Metrics user configurable settings

    project String

    Project the integration belongs to

    sourceEndpointId String

    Source endpoint for the integration (if any)

    sourceServiceName String

    Source service for the integration (if any)

    clickhouseKafkaUserConfig ServiceIntegrationClickhouseKafkaUserConfig

    ClickhouseKafka user configurable settings

    clickhousePostgresqlUserConfig ServiceIntegrationClickhousePostgresqlUserConfig

    ClickhousePostgresql user configurable settings

    datadogUserConfig ServiceIntegrationDatadogUserConfig

    Datadog user configurable settings

    destinationEndpointId string

    Destination endpoint for the integration (if any)

    destinationServiceName string

    Destination service for the integration (if any)

    externalAwsCloudwatchMetricsUserConfig ServiceIntegrationExternalAwsCloudwatchMetricsUserConfig

    ExternalAwsCloudwatchMetrics user configurable settings

    integrationId string

    Service Integration Id at aiven

    integrationType string

    Type of the service integration. Possible values: alertmanager, cassandra_cross_service_cluster, clickhouse_kafka, clickhouse_postgresql, dashboard, datadog, datasource, external_aws_cloudwatch_logs, external_aws_cloudwatch_metrics, external_elasticsearch_logs, external_google_cloud_logging, external_opensearch_logs, flink, internal_connectivity, jolokia, kafka_connect, kafka_logs, kafka_mirrormaker, logs, m3aggregator, m3coordinator, metrics, opensearch_cross_cluster_replication, opensearch_cross_cluster_search, prometheus, read_replica, rsyslog, schema_registry_proxy

    kafkaConnectUserConfig ServiceIntegrationKafkaConnectUserConfig

    KafkaConnect user configurable settings

    kafkaLogsUserConfig ServiceIntegrationKafkaLogsUserConfig

    KafkaLogs user configurable settings

    kafkaMirrormakerUserConfig ServiceIntegrationKafkaMirrormakerUserConfig

    KafkaMirrormaker user configurable settings

    logsUserConfig ServiceIntegrationLogsUserConfig

    Logs user configurable settings

    metricsUserConfig ServiceIntegrationMetricsUserConfig

    Metrics user configurable settings

    project string

    Project the integration belongs to

    sourceEndpointId string

    Source endpoint for the integration (if any)

    sourceServiceName string

    Source service for the integration (if any)

    clickhouse_kafka_user_config ServiceIntegrationClickhouseKafkaUserConfigArgs

    ClickhouseKafka user configurable settings

    clickhouse_postgresql_user_config ServiceIntegrationClickhousePostgresqlUserConfigArgs

    ClickhousePostgresql user configurable settings

    datadog_user_config ServiceIntegrationDatadogUserConfigArgs

    Datadog user configurable settings

    destination_endpoint_id str

    Destination endpoint for the integration (if any)

    destination_service_name str

    Destination service for the integration (if any)

    external_aws_cloudwatch_metrics_user_config ServiceIntegrationExternalAwsCloudwatchMetricsUserConfigArgs

    ExternalAwsCloudwatchMetrics user configurable settings

    integration_id str

    Service Integration Id at aiven

    integration_type str

    Type of the service integration. Possible values: alertmanager, cassandra_cross_service_cluster, clickhouse_kafka, clickhouse_postgresql, dashboard, datadog, datasource, external_aws_cloudwatch_logs, external_aws_cloudwatch_metrics, external_elasticsearch_logs, external_google_cloud_logging, external_opensearch_logs, flink, internal_connectivity, jolokia, kafka_connect, kafka_logs, kafka_mirrormaker, logs, m3aggregator, m3coordinator, metrics, opensearch_cross_cluster_replication, opensearch_cross_cluster_search, prometheus, read_replica, rsyslog, schema_registry_proxy

    kafka_connect_user_config ServiceIntegrationKafkaConnectUserConfigArgs

    KafkaConnect user configurable settings

    kafka_logs_user_config ServiceIntegrationKafkaLogsUserConfigArgs

    KafkaLogs user configurable settings

    kafka_mirrormaker_user_config ServiceIntegrationKafkaMirrormakerUserConfigArgs

    KafkaMirrormaker user configurable settings

    logs_user_config ServiceIntegrationLogsUserConfigArgs

    Logs user configurable settings

    metrics_user_config ServiceIntegrationMetricsUserConfigArgs

    Metrics user configurable settings

    project str

    Project the integration belongs to

    source_endpoint_id str

    Source endpoint for the integration (if any)

    source_service_name str

    Source service for the integration (if any)

    clickhouseKafkaUserConfig Property Map

    ClickhouseKafka user configurable settings

    clickhousePostgresqlUserConfig Property Map

    ClickhousePostgresql user configurable settings

    datadogUserConfig Property Map

    Datadog user configurable settings

    destinationEndpointId String

    Destination endpoint for the integration (if any)

    destinationServiceName String

    Destination service for the integration (if any)

    externalAwsCloudwatchMetricsUserConfig Property Map

    ExternalAwsCloudwatchMetrics user configurable settings

    integrationId String

    Service Integration Id at aiven

    integrationType String

    Type of the service integration. Possible values: alertmanager, cassandra_cross_service_cluster, clickhouse_kafka, clickhouse_postgresql, dashboard, datadog, datasource, external_aws_cloudwatch_logs, external_aws_cloudwatch_metrics, external_elasticsearch_logs, external_google_cloud_logging, external_opensearch_logs, flink, internal_connectivity, jolokia, kafka_connect, kafka_logs, kafka_mirrormaker, logs, m3aggregator, m3coordinator, metrics, opensearch_cross_cluster_replication, opensearch_cross_cluster_search, prometheus, read_replica, rsyslog, schema_registry_proxy

    kafkaConnectUserConfig Property Map

    KafkaConnect user configurable settings

    kafkaLogsUserConfig Property Map

    KafkaLogs user configurable settings

    kafkaMirrormakerUserConfig Property Map

    KafkaMirrormaker user configurable settings

    logsUserConfig Property Map

    Logs user configurable settings

    metricsUserConfig Property Map

    Metrics user configurable settings

    project String

    Project the integration belongs to

    sourceEndpointId String

    Source endpoint for the integration (if any)

    sourceServiceName String

    Source service for the integration (if any)

    Supporting Types

    ServiceIntegrationClickhouseKafkaUserConfig, ServiceIntegrationClickhouseKafkaUserConfigArgs

    tables List<Property Map>

    Tables to create.

    ServiceIntegrationClickhouseKafkaUserConfigTable, ServiceIntegrationClickhouseKafkaUserConfigTableArgs

    DataFormat string

    Message data format. The default value is JSONEachRow.

    GroupName string

    Kafka consumer group. The default value is clickhouse.

    Name string

    Table name.

    AutoOffsetReset string

    Action to take when there is no initial offset in offset store or the desired offset is out of range. The default value is earliest.

    Columns List<ServiceIntegrationClickhouseKafkaUserConfigTableColumn>

    Table columns.

    DateTimeInputFormat string

    Method to read DateTime from text input formats. The default value is basic.

    HandleErrorMode string

    How to handle errors for Kafka engine. The default value is default.

    MaxBlockSize int

    Number of rows collected by poll(s) for flushing data from Kafka. The default value is 0.

    MaxRowsPerMessage int

    The maximum number of rows produced in one Kafka message for row-based formats. The default value is 1.

    NumConsumers int

    The number of consumers per table per replica. The default value is 1.

    PollMaxBatchSize int

    Maximum number of messages to be polled in a single Kafka poll. The default value is 0.

    SkipBrokenMessages int

    Skip at least this number of broken messages from Kafka topic per block. The default value is 0.

    Topics List<ServiceIntegrationClickhouseKafkaUserConfigTableTopic>

    Kafka topics.

    DataFormat string

    Message data format. The default value is JSONEachRow.

    GroupName string

    Kafka consumer group. The default value is clickhouse.

    Name string

    Table name.

    AutoOffsetReset string

    Action to take when there is no initial offset in offset store or the desired offset is out of range. The default value is earliest.

    Columns []ServiceIntegrationClickhouseKafkaUserConfigTableColumn

    Table columns.

    DateTimeInputFormat string

    Method to read DateTime from text input formats. The default value is basic.

    HandleErrorMode string

    How to handle errors for Kafka engine. The default value is default.

    MaxBlockSize int

    Number of rows collected by poll(s) for flushing data from Kafka. The default value is 0.

    MaxRowsPerMessage int

    The maximum number of rows produced in one Kafka message for row-based formats. The default value is 1.

    NumConsumers int

    The number of consumers per table per replica. The default value is 1.

    PollMaxBatchSize int

    Maximum number of messages to be polled in a single Kafka poll. The default value is 0.

    SkipBrokenMessages int

    Skip at least this number of broken messages from Kafka topic per block. The default value is 0.

    Topics []ServiceIntegrationClickhouseKafkaUserConfigTableTopic

    Kafka topics.

    dataFormat String

    Message data format. The default value is JSONEachRow.

    groupName String

    Kafka consumer group. The default value is clickhouse.

    name String

    Table name.

    autoOffsetReset String

    Action to take when there is no initial offset in offset store or the desired offset is out of range. The default value is earliest.

    columns List<ServiceIntegrationClickhouseKafkaUserConfigTableColumn>

    Table columns.

    dateTimeInputFormat String

    Method to read DateTime from text input formats. The default value is basic.

    handleErrorMode String

    How to handle errors for Kafka engine. The default value is default.

    maxBlockSize Integer

    Number of rows collected by poll(s) for flushing data from Kafka. The default value is 0.

    maxRowsPerMessage Integer

    The maximum number of rows produced in one Kafka message for row-based formats. The default value is 1.

    numConsumers Integer

    The number of consumers per table per replica. The default value is 1.

    pollMaxBatchSize Integer

    Maximum number of messages to be polled in a single Kafka poll. The default value is 0.

    skipBrokenMessages Integer

    Skip at least this number of broken messages from Kafka topic per block. The default value is 0.

    topics List<ServiceIntegrationClickhouseKafkaUserConfigTableTopic>

    Kafka topics.

    dataFormat string

    Message data format. The default value is JSONEachRow.

    groupName string

    Kafka consumer group. The default value is clickhouse.

    name string

    Table name.

    autoOffsetReset string

    Action to take when there is no initial offset in offset store or the desired offset is out of range. The default value is earliest.

    columns ServiceIntegrationClickhouseKafkaUserConfigTableColumn[]

    Table columns.

    dateTimeInputFormat string

    Method to read DateTime from text input formats. The default value is basic.

    handleErrorMode string

    How to handle errors for Kafka engine. The default value is default.

    maxBlockSize number

    Number of rows collected by poll(s) for flushing data from Kafka. The default value is 0.

    maxRowsPerMessage number

    The maximum number of rows produced in one Kafka message for row-based formats. The default value is 1.

    numConsumers number

    The number of consumers per table per replica. The default value is 1.

    pollMaxBatchSize number

    Maximum number of messages to be polled in a single Kafka poll. The default value is 0.

    skipBrokenMessages number

    Skip at least this number of broken messages from Kafka topic per block. The default value is 0.

    topics ServiceIntegrationClickhouseKafkaUserConfigTableTopic[]

    Kafka topics.

    data_format str

    Message data format. The default value is JSONEachRow.

    group_name str

    Kafka consumer group. The default value is clickhouse.

    name str

    Table name.

    auto_offset_reset str

    Action to take when there is no initial offset in offset store or the desired offset is out of range. The default value is earliest.

    columns Sequence[ServiceIntegrationClickhouseKafkaUserConfigTableColumn]

    Table columns.

    date_time_input_format str

    Method to read DateTime from text input formats. The default value is basic.

    handle_error_mode str

    How to handle errors for Kafka engine. The default value is default.

    max_block_size int

    Number of rows collected by poll(s) for flushing data from Kafka. The default value is 0.

    max_rows_per_message int

    The maximum number of rows produced in one Kafka message for row-based formats. The default value is 1.

    num_consumers int

    The number of consumers per table per replica. The default value is 1.

    poll_max_batch_size int

    Maximum number of messages to be polled in a single Kafka poll. The default value is 0.

    skip_broken_messages int

    Skip at least this number of broken messages from Kafka topic per block. The default value is 0.

    topics Sequence[ServiceIntegrationClickhouseKafkaUserConfigTableTopic]

    Kafka topics.

    dataFormat String

    Message data format. The default value is JSONEachRow.

    groupName String

    Kafka consumer group. The default value is clickhouse.

    name String

    Table name.

    autoOffsetReset String

    Action to take when there is no initial offset in offset store or the desired offset is out of range. The default value is earliest.

    columns List<Property Map>

    Table columns.

    dateTimeInputFormat String

    Method to read DateTime from text input formats. The default value is basic.

    handleErrorMode String

    How to handle errors for Kafka engine. The default value is default.

    maxBlockSize Number

    Number of rows collected by poll(s) for flushing data from Kafka. The default value is 0.

    maxRowsPerMessage Number

    The maximum number of rows produced in one Kafka message for row-based formats. The default value is 1.

    numConsumers Number

    The number of consumers per table per replica. The default value is 1.

    pollMaxBatchSize Number

    Maximum number of messages to be polled in a single Kafka poll. The default value is 0.

    skipBrokenMessages Number

    Skip at least this number of broken messages from Kafka topic per block. The default value is 0.

    topics List<Property Map>

    Kafka topics.

    ServiceIntegrationClickhouseKafkaUserConfigTableColumn, ServiceIntegrationClickhouseKafkaUserConfigTableColumnArgs

    Name string

    Column name.

    Type string

    Column type.

    Name string

    Column name.

    Type string

    Column type.

    name String

    Column name.

    type String

    Column type.

    name string

    Column name.

    type string

    Column type.

    name str

    Column name.

    type str

    Column type.

    name String

    Column name.

    type String

    Column type.

    ServiceIntegrationClickhouseKafkaUserConfigTableTopic, ServiceIntegrationClickhouseKafkaUserConfigTableTopicArgs

    Name string

    Topic name.

    Name string

    Topic name.

    name String

    Topic name.

    name string

    Topic name.

    name str

    Topic name.

    name String

    Topic name.
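
    Putting the ClickhouseKafka table, column, and topic types above together, a hedged Python sketch of a clickhouse_kafka integration; the project, service, table, column, and topic names are all placeholders:

    import pulumi_aiven as aiven

    clickhouse_kafka = aiven.ServiceIntegration("clickhouseKafka",
        project="my-project",                       # placeholder project
        integration_type="clickhouse_kafka",
        source_service_name="my-kafka",             # placeholder Kafka service
        destination_service_name="my-clickhouse",   # placeholder ClickHouse service
        clickhouse_kafka_user_config=aiven.ServiceIntegrationClickhouseKafkaUserConfigArgs(
            tables=[aiven.ServiceIntegrationClickhouseKafkaUserConfigTableArgs(
                name="kafka_events",                # table to create in ClickHouse
                group_name="clickhouse",            # Kafka consumer group
                data_format="JSONEachRow",          # message data format
                columns=[aiven.ServiceIntegrationClickhouseKafkaUserConfigTableColumnArgs(
                    name="id",
                    type="UInt64")],
                topics=[aiven.ServiceIntegrationClickhouseKafkaUserConfigTableTopicArgs(
                    name="events")],
            )],
        ))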

    ServiceIntegrationClickhousePostgresqlUserConfig, ServiceIntegrationClickhousePostgresqlUserConfigArgs

    databases List<Property Map>

    Databases to expose.

    ServiceIntegrationClickhousePostgresqlUserConfigDatabase, ServiceIntegrationClickhousePostgresqlUserConfigDatabaseArgs

    Database string

    PostgreSQL database to expose. The default value is defaultdb.

    Schema string

    PostgreSQL schema to expose. The default value is public.

    Database string

    PostgreSQL database to expose. The default value is defaultdb.

    Schema string

    PostgreSQL schema to expose. The default value is public.

    database String

    PostgreSQL database to expose. The default value is defaultdb.

    schema String

    PostgreSQL schema to expose. The default value is public.

    database string

    PostgreSQL database to expose. The default value is defaultdb.

    schema string

    PostgreSQL schema to expose. The default value is public.

    database str

    PostgreSQL database to expose. The default value is defaultdb.

    schema str

    PostgreSQL schema to expose. The default value is public.

    database String

    PostgreSQL database to expose. The default value is defaultdb.

    schema String

    PostgreSQL schema to expose. The default value is public.
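
    Similarly, a minimal Python sketch using the ClickhousePostgresql database type above; the project and service names are placeholders:

    import pulumi_aiven as aiven

    clickhouse_pg = aiven.ServiceIntegration("clickhousePostgresql",
        project="my-project",                       # placeholder project
        integration_type="clickhouse_postgresql",
        source_service_name="my-postgres",          # placeholder PostgreSQL service
        destination_service_name="my-clickhouse",   # placeholder ClickHouse service
        clickhouse_postgresql_user_config=aiven.ServiceIntegrationClickhousePostgresqlUserConfigArgs(
            databases=[aiven.ServiceIntegrationClickhousePostgresqlUserConfigDatabaseArgs(
                database="defaultdb",               # PostgreSQL database to expose
                schema="public",                    # PostgreSQL schema to expose
            )],
        ))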

    ServiceIntegrationDatadogUserConfig, ServiceIntegrationDatadogUserConfigArgs

    DatadogDbmEnabled bool

    Enable Datadog Database Monitoring.

    DatadogTags List<ServiceIntegrationDatadogUserConfigDatadogTag>

    Custom tags provided by user.

    ExcludeConsumerGroups List<string>

    List of custom metrics.

    ExcludeTopics List<string>

    List of topics to exclude.

    IncludeConsumerGroups List<string>

    List of custom metrics.

    IncludeTopics List<string>

    List of topics to include.

    KafkaCustomMetrics List<string>

    List of custom metrics.

    MaxJmxMetrics int

    Maximum number of JMX metrics to send.

    Opensearch ServiceIntegrationDatadogUserConfigOpensearch

    Datadog Opensearch Options.

    Redis ServiceIntegrationDatadogUserConfigRedis

    Datadog Redis Options.

    DatadogDbmEnabled bool

    Enable Datadog Database Monitoring.

    DatadogTags []ServiceIntegrationDatadogUserConfigDatadogTag

    Custom tags provided by user.

    ExcludeConsumerGroups []string

    List of custom metrics.

    ExcludeTopics []string

    List of topics to exclude.

    IncludeConsumerGroups []string

    List of custom metrics.

    IncludeTopics []string

    List of topics to include.

    KafkaCustomMetrics []string

    List of custom metrics.

    MaxJmxMetrics int

    Maximum number of JMX metrics to send.

    Opensearch ServiceIntegrationDatadogUserConfigOpensearch

    Datadog Opensearch Options.

    Redis ServiceIntegrationDatadogUserConfigRedis

    Datadog Redis Options.

    datadogDbmEnabled Boolean

    Enable Datadog Database Monitoring.

    datadogTags List<ServiceIntegrationDatadogUserConfigDatadogTag>

    Custom tags provided by user.

    excludeConsumerGroups List<String>

    List of custom metrics.

    excludeTopics List<String>

    List of topics to exclude.

    includeConsumerGroups List<String>

    List of custom metrics.

    includeTopics List<String>

    List of topics to include.

    kafkaCustomMetrics List<String>

    List of custom metrics.

    maxJmxMetrics Integer

    Maximum number of JMX metrics to send.

    opensearch ServiceIntegrationDatadogUserConfigOpensearch

    Datadog Opensearch Options.

    redis ServiceIntegrationDatadogUserConfigRedis

    Datadog Redis Options.

    datadogDbmEnabled boolean

    Enable Datadog Database Monitoring.

    datadogTags ServiceIntegrationDatadogUserConfigDatadogTag[]

    Custom tags provided by user.

    excludeConsumerGroups string[]

    List of custom metrics.

    excludeTopics string[]

    List of topics to exclude.

    includeConsumerGroups string[]

    List of custom metrics.

    includeTopics string[]

    List of topics to include.

    kafkaCustomMetrics string[]

    List of custom metrics.

    maxJmxMetrics number

    Maximum number of JMX metrics to send.

    opensearch ServiceIntegrationDatadogUserConfigOpensearch

    Datadog Opensearch Options.

    redis ServiceIntegrationDatadogUserConfigRedis

    Datadog Redis Options.

    datadog_dbm_enabled bool

    Enable Datadog Database Monitoring.

    datadog_tags Sequence[ServiceIntegrationDatadogUserConfigDatadogTag]

    Custom tags provided by user.

    exclude_consumer_groups Sequence[str]

    List of custom metrics.

    exclude_topics Sequence[str]

    List of topics to exclude.

    include_consumer_groups Sequence[str]

    List of custom metrics.

    include_topics Sequence[str]

    List of topics to include.

    kafka_custom_metrics Sequence[str]

    List of custom metrics.

    max_jmx_metrics int

    Maximum number of JMX metrics to send.

    opensearch ServiceIntegrationDatadogUserConfigOpensearch

    Datadog Opensearch Options.

    redis ServiceIntegrationDatadogUserConfigRedis

    Datadog Redis Options.

    datadogDbmEnabled Boolean

    Enable Datadog Database Monitoring.

    datadogTags List<Property Map>

    Custom tags provided by user.

    excludeConsumerGroups List<String>

    List of custom metrics.

    excludeTopics List<String>

    List of topics to exclude.

    includeConsumerGroups List<String>

    List of custom metrics.

    includeTopics List<String>

    List of topics to include.

    kafkaCustomMetrics List<String>

    List of custom metrics.

    maxJmxMetrics Number

    Maximum number of JMX metrics to send.

    opensearch Property Map

    Datadog Opensearch Options.

    redis Property Map

    Datadog Redis Options.

    ServiceIntegrationDatadogUserConfigDatadogTag, ServiceIntegrationDatadogUserConfigDatadogTagArgs

    Tag string

    Tag format and usage are described here: https://docs.datadoghq.com/getting_started/tagging. Tags with prefix 'aiven-' are reserved for Aiven.

    Comment string

    Optional tag explanation.

    Tag string

    Tag format and usage are described here: https://docs.datadoghq.com/getting_started/tagging. Tags with prefix 'aiven-' are reserved for Aiven.

    Comment string

    Optional tag explanation.

    tag String

    Tag format and usage are described here: https://docs.datadoghq.com/getting_started/tagging. Tags with prefix 'aiven-' are reserved for Aiven.

    comment String

    Optional tag explanation.

    tag string

    Tag format and usage are described here: https://docs.datadoghq.com/getting_started/tagging. Tags with prefix 'aiven-' are reserved for Aiven.

    comment string

    Optional tag explanation.

    tag str

    Tag format and usage are described here: https://docs.datadoghq.com/getting_started/tagging. Tags with prefix 'aiven-' are reserved for Aiven.

    comment str

    Optional tag explanation.

    tag String

    Tag format and usage are described here: https://docs.datadoghq.com/getting_started/tagging. Tags with prefix 'aiven-' are reserved for Aiven.

    comment String

    Optional tag explanation.

    ServiceIntegrationDatadogUserConfigOpensearch, ServiceIntegrationDatadogUserConfigOpensearchArgs

    IndexStatsEnabled bool

    Enable Datadog Opensearch Index Monitoring.

    PendingTaskStatsEnabled bool

    Enable Datadog Opensearch Pending Task Monitoring.

    PshardStatsEnabled bool

    Enable Datadog Opensearch Primary Shard Monitoring.

    IndexStatsEnabled bool

    Enable Datadog Opensearch Index Monitoring.

    PendingTaskStatsEnabled bool

    Enable Datadog Opensearch Pending Task Monitoring.

    PshardStatsEnabled bool

    Enable Datadog Opensearch Primary Shard Monitoring.

    indexStatsEnabled Boolean

    Enable Datadog Opensearch Index Monitoring.

    pendingTaskStatsEnabled Boolean

    Enable Datadog Opensearch Pending Task Monitoring.

    pshardStatsEnabled Boolean

    Enable Datadog Opensearch Primary Shard Monitoring.

    indexStatsEnabled boolean

    Enable Datadog Opensearch Index Monitoring.

    pendingTaskStatsEnabled boolean

    Enable Datadog Opensearch Pending Task Monitoring.

    pshardStatsEnabled boolean

    Enable Datadog Opensearch Primary Shard Monitoring.

    index_stats_enabled bool

    Enable Datadog Opensearch Index Monitoring.

    pending_task_stats_enabled bool

    Enable Datadog Opensearch Pending Task Monitoring.

    pshard_stats_enabled bool

    Enable Datadog Opensearch Primary Shard Monitoring.

    indexStatsEnabled Boolean

    Enable Datadog Opensearch Index Monitoring.

    pendingTaskStatsEnabled Boolean

    Enable Datadog Opensearch Pending Task Monitoring.

    pshardStatsEnabled Boolean

    Enable Datadog Opensearch Primary Shard Monitoring.

    ServiceIntegrationDatadogUserConfigRedis, ServiceIntegrationDatadogUserConfigRedisArgs

    CommandStatsEnabled bool

    Enable command_stats option in the agent's configuration. The default value is false.

    CommandStatsEnabled bool

    Enable command_stats option in the agent's configuration. The default value is false.

    commandStatsEnabled Boolean

    Enable command_stats option in the agent's configuration. The default value is false.

    commandStatsEnabled boolean

    Enable command_stats option in the agent's configuration. The default value is false.

    command_stats_enabled bool

    Enable command_stats option in the agent's configuration. The default value is false.

    commandStatsEnabled Boolean

    Enable command_stats option in the agent's configuration. The default value is false.
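
    As a worked example of the Datadog options above (custom tags and the Redis command_stats flag), a hedged Python sketch; the project, service, and endpoint ID values are placeholders:

    import pulumi_aiven as aiven

    datadog_metrics = aiven.ServiceIntegration("datadogMetrics",
        project="my-project",                       # placeholder project
        integration_type="datadog",
        source_service_name="my-kafka",             # placeholder source service
        destination_endpoint_id="00000000-0000-0000-0000-000000000000",  # placeholder Datadog endpoint ID
        datadog_user_config=aiven.ServiceIntegrationDatadogUserConfigArgs(
            datadog_tags=[aiven.ServiceIntegrationDatadogUserConfigDatadogTagArgs(
                tag="env:staging",                  # tags with prefix 'aiven-' are reserved
                comment="environment tag")],
            max_jmx_metrics=2000,                   # cap on JMX metrics sent
            redis=aiven.ServiceIntegrationDatadogUserConfigRedisArgs(
                command_stats_enabled=True),        # enable command_stats in the agent configuration
        ))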

    ServiceIntegrationExternalAwsCloudwatchMetricsUserConfig, ServiceIntegrationExternalAwsCloudwatchMetricsUserConfigArgs

    DroppedMetrics List<ServiceIntegrationExternalAwsCloudwatchMetricsUserConfigDroppedMetric>

    Metrics to not send to AWS CloudWatch (takes precedence over extra_metrics).

    ExtraMetrics List<ServiceIntegrationExternalAwsCloudwatchMetricsUserConfigExtraMetric>

    Metrics to allow through to AWS CloudWatch (in addition to default metrics).

    DroppedMetrics []ServiceIntegrationExternalAwsCloudwatchMetricsUserConfigDroppedMetric

    Metrics to not send to AWS CloudWatch (takes precedence over extra_metrics).

    ExtraMetrics []ServiceIntegrationExternalAwsCloudwatchMetricsUserConfigExtraMetric

    Metrics to allow through to AWS CloudWatch (in addition to default metrics).

    droppedMetrics List<ServiceIntegrationExternalAwsCloudwatchMetricsUserConfigDroppedMetric>

    Metrics to not send to AWS CloudWatch (takes precedence over extra_metrics).

    extraMetrics List<ServiceIntegrationExternalAwsCloudwatchMetricsUserConfigExtraMetric>

    Metrics to allow through to AWS CloudWatch (in addition to default metrics).

    droppedMetrics ServiceIntegrationExternalAwsCloudwatchMetricsUserConfigDroppedMetric[]

    Metrics to not send to AWS CloudWatch (takes precedence over extra_metrics).

    extraMetrics ServiceIntegrationExternalAwsCloudwatchMetricsUserConfigExtraMetric[]

    Metrics to allow through to AWS CloudWatch (in addition to default metrics).

    dropped_metrics Sequence[ServiceIntegrationExternalAwsCloudwatchMetricsUserConfigDroppedMetric]

    Metrics to not send to AWS CloudWatch (takes precedence over extra_metrics).

    extra_metrics Sequence[ServiceIntegrationExternalAwsCloudwatchMetricsUserConfigExtraMetric]

    Metrics to allow through to AWS CloudWatch (in addition to default metrics).

    droppedMetrics List<Property Map>

    Metrics to not send to AWS CloudWatch (takes precedence over extra_metrics).

    extraMetrics List<Property Map>

    Metrics to allow through to AWS CloudWatch (in addition to default metrics).

    ServiceIntegrationExternalAwsCloudwatchMetricsUserConfigDroppedMetric, ServiceIntegrationExternalAwsCloudwatchMetricsUserConfigDroppedMetricArgs

    Field string

    Identifier of a value in the metric.

    Metric string

    Identifier of the metric.

    Field string

    Identifier of a value in the metric.

    Metric string

    Identifier of the metric.

    field String

    Identifier of a value in the metric.

    metric String

    Identifier of the metric.

    field string

    Identifier of a value in the metric.

    metric string

    Identifier of the metric.

    field str

    Identifier of a value in the metric.

    metric str

    Identifier of the metric.

    field String

    Identifier of a value in the metric.

    metric String

    Identifier of the metric.

    ServiceIntegrationExternalAwsCloudwatchMetricsUserConfigExtraMetric, ServiceIntegrationExternalAwsCloudwatchMetricsUserConfigExtraMetricArgs

    Field string

    Identifier of a value in the metric.

    Metric string

    Identifier of the metric.

    Field string

    Identifier of a value in the metric.

    Metric string

    Identifier of the metric.

    field String

    Identifier of a value in the metric.

    metric String

    Identifier of the metric.

    field string

    Identifier of a value in the metric.

    metric string

    Identifier of the metric.

    field str

    Identifier of a value in the metric.

    metric str

    Identifier of the metric.

    field String

    Identifier of a value in the metric.

    metric String

    Identifier of the metric.
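
    The dropped and extra metric lists are built from the same field/metric pairs. A hedged Python sketch with hypothetical metric identifiers:

    import pulumi_aiven as aiven

    # Sketch only: drop one metric from the default set and allow an extra one
    # through to AWS CloudWatch. dropped_metrics wins over extra_metrics when
    # both mention the same identifier. Metric and field names are hypothetical.
    cloudwatch_metrics_config = aiven.ServiceIntegrationExternalAwsCloudwatchMetricsUserConfigArgs(
        dropped_metrics=[
            aiven.ServiceIntegrationExternalAwsCloudwatchMetricsUserConfigDroppedMetricArgs(
                metric="cpu",          # hypothetical metric identifier
                field="usage_idle",    # hypothetical value identifier within the metric
            ),
        ],
        extra_metrics=[
            aiven.ServiceIntegrationExternalAwsCloudwatchMetricsUserConfigExtraMetricArgs(
                metric="disk",         # hypothetical metric identifier
                field="free",          # hypothetical value identifier within the metric
            ),
        ],
    )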

    ServiceIntegrationKafkaConnectUserConfig, ServiceIntegrationKafkaConnectUserConfigArgs

    KafkaConnect ServiceIntegrationKafkaConnectUserConfigKafkaConnect

    Kafka Connect service configuration values.

    KafkaConnect ServiceIntegrationKafkaConnectUserConfigKafkaConnect

    Kafka Connect service configuration values.

    kafkaConnect ServiceIntegrationKafkaConnectUserConfigKafkaConnect

    Kafka Connect service configuration values.

    kafkaConnect ServiceIntegrationKafkaConnectUserConfigKafkaConnect

    Kafka Connect service configuration values.

    kafka_connect ServiceIntegrationKafkaConnectUserConfigKafkaConnect

    Kafka Connect service configuration values.

    kafkaConnect Property Map

    Kafka Connect service configuration values.

    ServiceIntegrationKafkaConnectUserConfigKafkaConnect, ServiceIntegrationKafkaConnectUserConfigKafkaConnectArgs

    ConfigStorageTopic string

    The name of the topic where connector and task configuration data are stored. This must be the same for all workers with the same group_id.

    GroupId string

    A unique string that identifies the Connect cluster group this worker belongs to.

    OffsetStorageTopic string

    The name of the topic where connector and task configuration offsets are stored. This must be the same for all workers with the same group_id.

    StatusStorageTopic string

    The name of the topic where connector and task configuration status updates are stored. This must be the same for all workers with the same group_id.

    ConfigStorageTopic string

    The name of the topic where connector and task configuration data are stored. This must be the same for all workers with the same group_id.

    GroupId string

    A unique string that identifies the Connect cluster group this worker belongs to.

    OffsetStorageTopic string

    The name of the topic where connector and task configuration offsets are stored. This must be the same for all workers with the same group_id.

    StatusStorageTopic string

    The name of the topic where connector and task configuration status updates are stored. This must be the same for all workers with the same group_id.

    configStorageTopic String

    The name of the topic where connector and task configuration data are stored. This must be the same for all workers with the same group_id.

    groupId String

    A unique string that identifies the Connect cluster group this worker belongs to.

    offsetStorageTopic String

    The name of the topic where connector and task configuration offsets are stored. This must be the same for all workers with the same group_id.

    statusStorageTopic String

    The name of the topic where connector and task configuration status updates are stored. This must be the same for all workers with the same group_id.

    configStorageTopic string

    The name of the topic where connector and task configuration data are stored. This must be the same for all workers with the same group_id.

    groupId string

    A unique string that identifies the Connect cluster group this worker belongs to.

    offsetStorageTopic string

    The name of the topic where connector and task configuration offsets are stored. This must be the same for all workers with the same group_id.

    statusStorageTopic string

    The name of the topic where connector and task configuration status updates are stored. This must be the same for all workers with the same group_id.

    config_storage_topic str

    The name of the topic where connector and task configuration data are stored. This must be the same for all workers with the same group_id.

    group_id str

    A unique string that identifies the Connect cluster group this worker belongs to.

    offset_storage_topic str

    The name of the topic where connector and task configuration offsets are stored. This must be the same for all workers with the same group_id.

    status_storage_topic str

    The name of the topic where connector and task configuration status updates are stored. This must be the same for all workers with the same group_id.

    configStorageTopic String

    The name of the topic where connector and task configuration data are stored. This must be the same for all workers with the same group_id.

    groupId String

    A unique string that identifies the Connect cluster group this worker belongs to.

    offsetStorageTopic String

    The name of the topic where connector and task configuration offsets are stored. This must be the same for all workers with the same group_id.

    statusStorageTopic String

    The name of the topic where connector and task configuration status updates are stored. This must be the same for all workers with the same group_id.

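    For reference, a minimal Python sketch of the Kafka Connect user config; the topic and group names are hypothetical placeholders, and all workers that share a group_id must use the same values.

    import pulumi_aiven as aiven

    # Sketch only: shared internal topics for one Connect worker group.
    # Topic and group names below are hypothetical.
    kafka_connect_config = aiven.ServiceIntegrationKafkaConnectUserConfigArgs(
        kafka_connect=aiven.ServiceIntegrationKafkaConnectUserConfigKafkaConnectArgs(
            group_id="connect",
            config_storage_topic="__connect_configs",
            offset_storage_topic="__connect_offsets",
            status_storage_topic="__connect_status",
        ),
    )
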
    ServiceIntegrationKafkaLogsUserConfig, ServiceIntegrationKafkaLogsUserConfigArgs

    KafkaTopic string

    Topic name.

    SelectedLogFields List<string>

    The list of logging fields that will be sent to the integration logging service. The MESSAGE and timestamp fields are always sent.

    KafkaTopic string

    Topic name.

    SelectedLogFields []string

    The list of logging fields that will be sent to the integration logging service. The MESSAGE and timestamp fields are always sent.

    kafkaTopic String

    Topic name.

    selectedLogFields List<String>

    The list of logging fields that will be sent to the integration logging service. The MESSAGE and timestamp fields are always sent.

    kafkaTopic string

    Topic name.

    selectedLogFields string[]

    The list of logging fields that will be sent to the integration logging service. The MESSAGE and timestamp fields are always sent.

    kafka_topic str

    Topic name.

    selected_log_fields Sequence[str]

    The list of logging fields that will be sent to the integration logging service. The MESSAGE and timestamp fields are always sent.

    kafkaTopic String

    Topic name.

    selectedLogFields List<String>

    The list of logging fields that will be sent to the integration logging service. The MESSAGE and timestamp fields are always sent.
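
    A hedged Python sketch of the Kafka logs user config; the topic name and the selected fields are hypothetical examples:

    import pulumi_aiven as aiven

    # Sketch only: ship logs to a hypothetical Kafka topic and limit the optional
    # fields that accompany them (MESSAGE and timestamp are always sent).
    kafka_logs_config = aiven.ServiceIntegrationKafkaLogsUserConfigArgs(
        kafka_topic="service-logs",                         # hypothetical topic name
        selected_log_fields=["HOSTNAME", "SERVICE_NAME"],   # hypothetical field selection
    )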

    ServiceIntegrationKafkaMirrormakerUserConfig, ServiceIntegrationKafkaMirrormakerUserConfigArgs

    ClusterAlias string

    The alias under which the Kafka cluster is known to MirrorMaker. Can contain the following symbols: ASCII alphanumerics, '.', '_', and '-'.

    KafkaMirrormaker ServiceIntegrationKafkaMirrormakerUserConfigKafkaMirrormaker

    Kafka MirrorMaker configuration values.

    ClusterAlias string

    The alias under which the Kafka cluster is known to MirrorMaker. Can contain the following symbols: ASCII alphanumerics, '.', '_', and '-'.

    KafkaMirrormaker ServiceIntegrationKafkaMirrormakerUserConfigKafkaMirrormaker

    Kafka MirrorMaker configuration values.

    clusterAlias String

    The alias under which the Kafka cluster is known to MirrorMaker. Can contain the following symbols: ASCII alphanumerics, '.', '_', and '-'.

    kafkaMirrormaker ServiceIntegrationKafkaMirrormakerUserConfigKafkaMirrormaker

    Kafka MirrorMaker configuration values.

    clusterAlias string

    The alias under which the Kafka cluster is known to MirrorMaker. Can contain the following symbols: ASCII alphanumerics, '.', '_', and '-'.

    kafkaMirrormaker ServiceIntegrationKafkaMirrormakerUserConfigKafkaMirrormaker

    Kafka MirrorMaker configuration values.

    cluster_alias str

    The alias under which the Kafka cluster is known to MirrorMaker. Can contain the following symbols: ASCII alphanumerics, '.', '_', and '-'.

    kafka_mirrormaker ServiceIntegrationKafkaMirrormakerUserConfigKafkaMirrormaker

    Kafka MirrorMaker configuration values.

    clusterAlias String

    The alias under which the Kafka cluster is known to MirrorMaker. Can contain the following symbols: ASCII alphanumerics, '.', '_', and '-'.

    kafkaMirrormaker Property Map

    Kafka MirrorMaker configuration values.

    ServiceIntegrationKafkaMirrormakerUserConfigKafkaMirrormaker, ServiceIntegrationKafkaMirrormakerUserConfigKafkaMirrormakerArgs

    ConsumerFetchMinBytes int

    The minimum amount of data the server should return for a fetch request.

    ProducerBatchSize int

    The batch size in bytes producer will attempt to collect before publishing to broker.

    ProducerBufferMemory int

    The amount of bytes producer can use for buffering data before publishing to broker.

    ProducerCompressionType string

    Specify the default compression type for producers. This configuration accepts the standard compression codecs ('gzip', 'snappy', 'lz4', 'zstd'). It additionally accepts 'none' which is the default and equivalent to no compression.

    ProducerLingerMs int

    The linger time (ms) for waiting new data to arrive for publishing.

    ProducerMaxRequestSize int

    The maximum request size in bytes.

    ConsumerFetchMinBytes int

    The minimum amount of data the server should return for a fetch request.

    ProducerBatchSize int

    The batch size in bytes producer will attempt to collect before publishing to broker.

    ProducerBufferMemory int

    The amount of bytes producer can use for buffering data before publishing to broker.

    ProducerCompressionType string

    Specify the default compression type for producers. This configuration accepts the standard compression codecs ('gzip', 'snappy', 'lz4', 'zstd'). It additionally accepts 'none' which is the default and equivalent to no compression.

    ProducerLingerMs int

    The linger time (ms) for waiting new data to arrive for publishing.

    ProducerMaxRequestSize int

    The maximum request size in bytes.

    consumerFetchMinBytes Integer

    The minimum amount of data the server should return for a fetch request.

    producerBatchSize Integer

    The batch size in bytes producer will attempt to collect before publishing to broker.

    producerBufferMemory Integer

    The amount of bytes producer can use for buffering data before publishing to broker.

    producerCompressionType String

    Specify the default compression type for producers. This configuration accepts the standard compression codecs ('gzip', 'snappy', 'lz4', 'zstd'). It additionally accepts 'none' which is the default and equivalent to no compression.

    producerLingerMs Integer

    The linger time (ms) for waiting new data to arrive for publishing.

    producerMaxRequestSize Integer

    The maximum request size in bytes.

    consumerFetchMinBytes number

    The minimum amount of data the server should return for a fetch request.

    producerBatchSize number

    The batch size in bytes producer will attempt to collect before publishing to broker.

    producerBufferMemory number

    The amount of bytes producer can use for buffering data before publishing to broker.

    producerCompressionType string

    Specify the default compression type for producers. This configuration accepts the standard compression codecs ('gzip', 'snappy', 'lz4', 'zstd'). It additionally accepts 'none' which is the default and equivalent to no compression.

    producerLingerMs number

    The linger time (ms) for waiting new data to arrive for publishing.

    producerMaxRequestSize number

    The maximum request size in bytes.

    consumer_fetch_min_bytes int

    The minimum amount of data the server should return for a fetch request.

    producer_batch_size int

    The batch size in bytes producer will attempt to collect before publishing to broker.

    producer_buffer_memory int

    The amount of bytes producer can use for buffering data before publishing to broker.

    producer_compression_type str

    Specify the default compression type for producers. This configuration accepts the standard compression codecs ('gzip', 'snappy', 'lz4', 'zstd'). It additionally accepts 'none' which is the default and equivalent to no compression.

    producer_linger_ms int

    The linger time (ms) for waiting new data to arrive for publishing.

    producer_max_request_size int

    The maximum request size in bytes.

    consumerFetchMinBytes Number

    The minimum amount of data the server should return for a fetch request.

    producerBatchSize Number

    The batch size in bytes producer will attempt to collect before publishing to broker.

    producerBufferMemory Number

    The amount of bytes producer can use for buffering data before publishing to broker.

    producerCompressionType String

    Specify the default compression type for producers. This configuration accepts the standard compression codecs ('gzip', 'snappy', 'lz4', 'zstd'). It additionally accepts 'none' which is the default and equivalent to no compression.

    producerLingerMs Number

    The linger time (ms) for waiting new data to arrive for publishing.

    producerMaxRequestSize Number

    The maximum request size in bytes.
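
    To illustrate how the cluster alias and the nested MirrorMaker tuning fit together, a hedged Python sketch with hypothetical values:

    import pulumi_aiven as aiven

    # Sketch only: register the cluster under an alias and adjust a few
    # consumer/producer settings. All values are hypothetical examples.
    mirrormaker_config = aiven.ServiceIntegrationKafkaMirrormakerUserConfigArgs(
        cluster_alias="source-kafka",
        kafka_mirrormaker=aiven.ServiceIntegrationKafkaMirrormakerUserConfigKafkaMirrormakerArgs(
            consumer_fetch_min_bytes=1024,
            producer_compression_type="snappy",
            producer_linger_ms=100,
            producer_max_request_size=1048576,
        ),
    )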

    ServiceIntegrationLogsUserConfig, ServiceIntegrationLogsUserConfigArgs

    ElasticsearchIndexDaysMax int

    Elasticsearch index retention limit. The default value is 3.

    ElasticsearchIndexPrefix string

    Elasticsearch index prefix. The default value is logs.

    SelectedLogFields List<string>

    The list of logging fields that will be sent to the integration logging service. The MESSAGE and timestamp fields are always sent.

    ElasticsearchIndexDaysMax int

    Elasticsearch index retention limit. The default value is 3.

    ElasticsearchIndexPrefix string

    Elasticsearch index prefix. The default value is logs.

    SelectedLogFields []string

    The list of logging fields that will be sent to the integration logging service. The MESSAGE and timestamp fields are always sent.

    elasticsearchIndexDaysMax Integer

    Elasticsearch index retention limit. The default value is 3.

    elasticsearchIndexPrefix String

    Elasticsearch index prefix. The default value is logs.

    selectedLogFields List<String>

    The list of logging fields that will be sent to the integration logging service. The MESSAGE and timestamp fields are always sent.

    elasticsearchIndexDaysMax number

    Elasticsearch index retention limit. The default value is 3.

    elasticsearchIndexPrefix string

    Elasticsearch index prefix. The default value is logs.

    selectedLogFields string[]

    The list of logging fields that will be sent to the integration logging service. The MESSAGE and timestamp fields are always sent.

    elasticsearch_index_days_max int

    Elasticsearch index retention limit. The default value is 3.

    elasticsearch_index_prefix str

    Elasticsearch index prefix. The default value is logs.

    selected_log_fields Sequence[str]

    The list of logging fields that will be sent to the integration logging service. The MESSAGE and timestamp fields are always sent.

    elasticsearchIndexDaysMax Number

    Elasticsearch index retention limit. The default value is 3.

    elasticsearchIndexPrefix String

    Elasticsearch index prefix. The default value is logs.

    selectedLogFields List<String>

    The list of logging fields that will be sent to the integration logging service. The MESSAGE and timestamp fields are always sent.
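
    A minimal Python sketch of the logs user config, with hypothetical retention and prefix values (the documented defaults are 3 days and "logs"):

    import pulumi_aiven as aiven

    # Sketch only: keep one week of indices under a custom prefix and trim the
    # optional fields. Values are hypothetical.
    logs_config = aiven.ServiceIntegrationLogsUserConfigArgs(
        elasticsearch_index_days_max=7,
        elasticsearch_index_prefix="service-logs",
        selected_log_fields=["HOSTNAME"],  # MESSAGE and timestamp are always sent
    )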

    ServiceIntegrationMetricsUserConfig, ServiceIntegrationMetricsUserConfigArgs

    Database string

    Name of the database where to store metric datapoints. Only affects PostgreSQL destinations. Defaults to 'metrics'. Note that this must be the same for all metrics integrations that write data to the same PostgreSQL service.

    RetentionDays int

    Number of days to keep old metrics. Only affects PostgreSQL destinations. Set to 0 for no automatic cleanup. Defaults to 30 days.

    RoUsername string

    Name of a user that can be used to read metrics. This will be used for Grafana integration (if enabled) to prevent Grafana users from making undesired changes. Only affects PostgreSQL destinations. Defaults to 'metrics_reader'. Note that this must be the same for all metrics integrations that write data to the same PostgreSQL service.

    SourceMysql ServiceIntegrationMetricsUserConfigSourceMysql

    Configuration options for metrics where source service is MySQL.

    Username string

    Name of the user used to write metrics. Only affects PostgreSQL destinations. Defaults to 'metrics_writer'. Note that this must be the same for all metrics integrations that write data to the same PostgreSQL service.

    Database string

    Name of the database where to store metric datapoints. Only affects PostgreSQL destinations. Defaults to 'metrics'. Note that this must be the same for all metrics integrations that write data to the same PostgreSQL service.

    RetentionDays int

    Number of days to keep old metrics. Only affects PostgreSQL destinations. Set to 0 for no automatic cleanup. Defaults to 30 days.

    RoUsername string

    Name of a user that can be used to read metrics. This will be used for Grafana integration (if enabled) to prevent Grafana users from making undesired changes. Only affects PostgreSQL destinations. Defaults to 'metrics_reader'. Note that this must be the same for all metrics integrations that write data to the same PostgreSQL service.

    SourceMysql ServiceIntegrationMetricsUserConfigSourceMysql

    Configuration options for metrics where source service is MySQL.

    Username string

    Name of the user used to write metrics. Only affects PostgreSQL destinations. Defaults to 'metrics_writer'. Note that this must be the same for all metrics integrations that write data to the same PostgreSQL service.

    database String

    Name of the database where to store metric datapoints. Only affects PostgreSQL destinations. Defaults to 'metrics'. Note that this must be the same for all metrics integrations that write data to the same PostgreSQL service.

    retentionDays Integer

    Number of days to keep old metrics. Only affects PostgreSQL destinations. Set to 0 for no automatic cleanup. Defaults to 30 days.

    roUsername String

    Name of a user that can be used to read metrics. This will be used for Grafana integration (if enabled) to prevent Grafana users from making undesired changes. Only affects PostgreSQL destinations. Defaults to 'metrics_reader'. Note that this must be the same for all metrics integrations that write data to the same PostgreSQL service.

    sourceMysql ServiceIntegrationMetricsUserConfigSourceMysql

    Configuration options for metrics where source service is MySQL.

    username String

    Name of the user used to write metrics. Only affects PostgreSQL destinations. Defaults to 'metrics_writer'. Note that this must be the same for all metrics integrations that write data to the same PostgreSQL service.

    database string

    Name of the database where to store metric datapoints. Only affects PostgreSQL destinations. Defaults to 'metrics'. Note that this must be the same for all metrics integrations that write data to the same PostgreSQL service.

    retentionDays number

    Number of days to keep old metrics. Only affects PostgreSQL destinations. Set to 0 for no automatic cleanup. Defaults to 30 days.

    roUsername string

    Name of a user that can be used to read metrics. This will be used for Grafana integration (if enabled) to prevent Grafana users from making undesired changes. Only affects PostgreSQL destinations. Defaults to 'metrics_reader'. Note that this must be the same for all metrics integrations that write data to the same PostgreSQL service.

    sourceMysql ServiceIntegrationMetricsUserConfigSourceMysql

    Configuration options for metrics where source service is MySQL.

    username string

    Name of the user used to write metrics. Only affects PostgreSQL destinations. Defaults to 'metrics_writer'. Note that this must be the same for all metrics integrations that write data to the same PostgreSQL service.

    database str

    Name of the database where to store metric datapoints. Only affects PostgreSQL destinations. Defaults to 'metrics'. Note that this must be the same for all metrics integrations that write data to the same PostgreSQL service.

    retention_days int

    Number of days to keep old metrics. Only affects PostgreSQL destinations. Set to 0 for no automatic cleanup. Defaults to 30 days.

    ro_username str

    Name of a user that can be used to read metrics. This will be used for Grafana integration (if enabled) to prevent Grafana users from making undesired changes. Only affects PostgreSQL destinations. Defaults to 'metrics_reader'. Note that this must be the same for all metrics integrations that write data to the same PostgreSQL service.

    source_mysql ServiceIntegrationMetricsUserConfigSourceMysql

    Configuration options for metrics where source service is MySQL.

    username str

    Name of the user used to write metrics. Only affects PostgreSQL destinations. Defaults to 'metrics_writer'. Note that this must be the same for all metrics integrations that write data to the same PostgreSQL service.

    database String

    Name of the database where to store metric datapoints. Only affects PostgreSQL destinations. Defaults to 'metrics'. Note that this must be the same for all metrics integrations that write data to the same PostgreSQL service.

    retentionDays Number

    Number of days to keep old metrics. Only affects PostgreSQL destinations. Set to 0 for no automatic cleanup. Defaults to 30 days.

    roUsername String

    Name of a user that can be used to read metrics. This will be used for Grafana integration (if enabled) to prevent Grafana users from making undesired changes. Only affects PostgreSQL destinations. Defaults to 'metrics_reader'. Note that this must be the same for all metrics integrations that write data to the same PostgreSQL service.

    sourceMysql Property Map

    Configuration options for metrics where source service is MySQL.

    username String

    Name of the user used to write metrics. Only affects PostgreSQL destinations. Defaults to 'metrics_writer'. Note that this must be the same for all metrics integrations that write data to the same PostgreSQL service.
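
    As a hedged Python sketch for a PostgreSQL metrics destination, the user and database names below mirror the documented defaults and the retention is a hypothetical override:

    import pulumi_aiven as aiven

    # Sketch only: metrics written to PostgreSQL with a 90-day retention.
    # These values must match across every metrics integration that writes to
    # the same PostgreSQL service.
    metrics_config = aiven.ServiceIntegrationMetricsUserConfigArgs(
        database="metrics",
        retention_days=90,
        ro_username="metrics_reader",
        username="metrics_writer",
    )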

    ServiceIntegrationMetricsUserConfigSourceMysql, ServiceIntegrationMetricsUserConfigSourceMysqlArgs

    Telegraf ServiceIntegrationMetricsUserConfigSourceMysqlTelegraf

    Configuration options for Telegraf MySQL input plugin.

    Telegraf ServiceIntegrationMetricsUserConfigSourceMysqlTelegraf

    Configuration options for Telegraf MySQL input plugin.

    telegraf ServiceIntegrationMetricsUserConfigSourceMysqlTelegraf

    Configuration options for Telegraf MySQL input plugin.

    telegraf ServiceIntegrationMetricsUserConfigSourceMysqlTelegraf

    Configuration options for Telegraf MySQL input plugin.

    telegraf ServiceIntegrationMetricsUserConfigSourceMysqlTelegraf

    Configuration options for Telegraf MySQL input plugin.

    telegraf Property Map

    Configuration options for Telegraf MySQL input plugin.

    ServiceIntegrationMetricsUserConfigSourceMysqlTelegraf, ServiceIntegrationMetricsUserConfigSourceMysqlTelegrafArgs

    GatherEventWaits bool

    Gather metrics from PERFORMANCE_SCHEMA.EVENT_WAITS.

    GatherFileEventsStats bool

    Gather metrics from PERFORMANCE_SCHEMA.FILE_SUMMARY_BY_EVENT_NAME.

    GatherIndexIoWaits bool

    Gather metrics from PERFORMANCE_SCHEMA.TABLE_IO_WAITS_SUMMARY_BY_INDEX_USAGE.

    GatherInfoSchemaAutoInc bool

    Gather auto_increment columns and max values from information schema.

    GatherInnodbMetrics bool

    Gather metrics from INFORMATION_SCHEMA.INNODB_METRICS.

    GatherPerfEventsStatements bool

    Gather metrics from PERFORMANCE_SCHEMA.EVENTS_STATEMENTS_SUMMARY_BY_DIGEST.

    GatherProcessList bool

    Gather thread state counts from INFORMATION_SCHEMA.PROCESSLIST.

    GatherSlaveStatus bool

    Gather metrics from SHOW SLAVE STATUS command output.

    GatherTableIoWaits bool

    Gather metrics from PERFORMANCE_SCHEMA.TABLE_IO_WAITS_SUMMARY_BY_TABLE.

    GatherTableLockWaits bool

    Gather metrics from PERFORMANCE_SCHEMA.TABLE_LOCK_WAITS.

    GatherTableSchema bool

    Gather metrics from INFORMATION_SCHEMA.TABLES.

    PerfEventsStatementsDigestTextLimit int

    Truncates digest text from perf_events_statements into this many characters.

    PerfEventsStatementsLimit int

    Limits metrics from perf_events_statements.

    PerfEventsStatementsTimeLimit int

    Only include perf_events_statements whose last seen is less than this many seconds.

    GatherEventWaits bool

    Gather metrics from PERFORMANCE_SCHEMA.EVENT_WAITS.

    GatherFileEventsStats bool

    Gather metrics from PERFORMANCE_SCHEMA.FILE_SUMMARY_BY_EVENT_NAME.

    GatherIndexIoWaits bool

    Gather metrics from PERFORMANCE_SCHEMA.TABLE_IO_WAITS_SUMMARY_BY_INDEX_USAGE.

    GatherInfoSchemaAutoInc bool

    Gather auto_increment columns and max values from information schema.

    GatherInnodbMetrics bool

    Gather metrics from INFORMATION_SCHEMA.INNODB_METRICS.

    GatherPerfEventsStatements bool

    Gather metrics from PERFORMANCE_SCHEMA.EVENTS_STATEMENTS_SUMMARY_BY_DIGEST.

    GatherProcessList bool

    Gather thread state counts from INFORMATION_SCHEMA.PROCESSLIST.

    GatherSlaveStatus bool

    Gather metrics from SHOW SLAVE STATUS command output.

    GatherTableIoWaits bool

    Gather metrics from PERFORMANCE_SCHEMA.TABLE_IO_WAITS_SUMMARY_BY_TABLE.

    GatherTableLockWaits bool

    Gather metrics from PERFORMANCE_SCHEMA.TABLE_LOCK_WAITS.

    GatherTableSchema bool

    Gather metrics from INFORMATION_SCHEMA.TABLES.

    PerfEventsStatementsDigestTextLimit int

    Truncates digest text from perf_events_statements into this many characters.

    PerfEventsStatementsLimit int

    Limits metrics from perf_events_statements.

    PerfEventsStatementsTimeLimit int

    Only include perf_events_statements whose last seen is less than this many seconds.

    gatherEventWaits Boolean

    Gather metrics from PERFORMANCE_SCHEMA.EVENT_WAITS.

    gatherFileEventsStats Boolean

    Gather metrics from PERFORMANCE_SCHEMA.FILE_SUMMARY_BY_EVENT_NAME.

    gatherIndexIoWaits Boolean

    Gather metrics from PERFORMANCE_SCHEMA.TABLE_IO_WAITS_SUMMARY_BY_INDEX_USAGE.

    gatherInfoSchemaAutoInc Boolean

    Gather auto_increment columns and max values from information schema.

    gatherInnodbMetrics Boolean

    Gather metrics from INFORMATION_SCHEMA.INNODB_METRICS.

    gatherPerfEventsStatements Boolean

    Gather metrics from PERFORMANCE_SCHEMA.EVENTS_STATEMENTS_SUMMARY_BY_DIGEST.

    gatherProcessList Boolean

    Gather thread state counts from INFORMATION_SCHEMA.PROCESSLIST.

    gatherSlaveStatus Boolean

    Gather metrics from SHOW SLAVE STATUS command output.

    gatherTableIoWaits Boolean

    Gather metrics from PERFORMANCE_SCHEMA.TABLE_IO_WAITS_SUMMARY_BY_TABLE.

    gatherTableLockWaits Boolean

    Gather metrics from PERFORMANCE_SCHEMA.TABLE_LOCK_WAITS.

    gatherTableSchema Boolean

    Gather metrics from INFORMATION_SCHEMA.TABLES.

    perfEventsStatementsDigestTextLimit Integer

    Truncates digest text from perf_events_statements into this many characters.

    perfEventsStatementsLimit Integer

    Limits metrics from perf_events_statements.

    perfEventsStatementsTimeLimit Integer

    Only include perf_events_statements whose last seen is less than this many seconds.

    gatherEventWaits boolean

    Gather metrics from PERFORMANCE_SCHEMA.EVENT_WAITS.

    gatherFileEventsStats boolean

    Gather metrics from PERFORMANCE_SCHEMA.FILE_SUMMARY_BY_EVENT_NAME.

    gatherIndexIoWaits boolean

    Gather metrics from PERFORMANCE_SCHEMA.TABLE_IO_WAITS_SUMMARY_BY_INDEX_USAGE.

    gatherInfoSchemaAutoInc boolean

    Gather auto_increment columns and max values from information schema.

    gatherInnodbMetrics boolean

    Gather metrics from INFORMATION_SCHEMA.INNODB_METRICS.

    gatherPerfEventsStatements boolean

    Gather metrics from PERFORMANCE_SCHEMA.EVENTS_STATEMENTS_SUMMARY_BY_DIGEST.

    gatherProcessList boolean

    Gather thread state counts from INFORMATION_SCHEMA.PROCESSLIST.

    gatherSlaveStatus boolean

    Gather metrics from SHOW SLAVE STATUS command output.

    gatherTableIoWaits boolean

    Gather metrics from PERFORMANCE_SCHEMA.TABLE_IO_WAITS_SUMMARY_BY_TABLE.

    gatherTableLockWaits boolean

    Gather metrics from PERFORMANCE_SCHEMA.TABLE_LOCK_WAITS.

    gatherTableSchema boolean

    Gather metrics from INFORMATION_SCHEMA.TABLES.

    perfEventsStatementsDigestTextLimit number

    Truncates digest text from perf_events_statements into this many characters.

    perfEventsStatementsLimit number

    Limits metrics from perf_events_statements.

    perfEventsStatementsTimeLimit number

    Only include perf_events_statements whose last seen is less than this many seconds.

    gather_event_waits bool

    Gather metrics from PERFORMANCE_SCHEMA.EVENT_WAITS.

    gather_file_events_stats bool

    Gather metrics from PERFORMANCE_SCHEMA.FILE_SUMMARY_BY_EVENT_NAME.

    gather_index_io_waits bool

    Gather metrics from PERFORMANCE_SCHEMA.TABLE_IO_WAITS_SUMMARY_BY_INDEX_USAGE.

    gather_info_schema_auto_inc bool

    Gather auto_increment columns and max values from information schema.

    gather_innodb_metrics bool

    Gather metrics from INFORMATION_SCHEMA.INNODB_METRICS.

    gather_perf_events_statements bool

    Gather metrics from PERFORMANCE_SCHEMA.EVENTS_STATEMENTS_SUMMARY_BY_DIGEST.

    gather_process_list bool

    Gather thread state counts from INFORMATION_SCHEMA.PROCESSLIST.

    gather_slave_status bool

    Gather metrics from SHOW SLAVE STATUS command output.

    gather_table_io_waits bool

    Gather metrics from PERFORMANCE_SCHEMA.TABLE_IO_WAITS_SUMMARY_BY_TABLE.

    gather_table_lock_waits bool

    Gather metrics from PERFORMANCE_SCHEMA.TABLE_LOCK_WAITS.

    gather_table_schema bool

    Gather metrics from INFORMATION_SCHEMA.TABLES.

    perf_events_statements_digest_text_limit int

    Truncates digest text from perf_events_statements into this many characters.

    perf_events_statements_limit int

    Limits metrics from perf_events_statements.

    perf_events_statements_time_limit int

    Only include perf_events_statements whose last seen is less than this many seconds.

    gatherEventWaits Boolean

    Gather metrics from PERFORMANCE_SCHEMA.EVENT_WAITS.

    gatherFileEventsStats Boolean

    Gather metrics from PERFORMANCE_SCHEMA.FILE_SUMMARY_BY_EVENT_NAME.

    gatherIndexIoWaits Boolean

    Gather metrics from PERFORMANCE_SCHEMA.TABLE_IO_WAITS_SUMMARY_BY_INDEX_USAGE.

    gatherInfoSchemaAutoInc Boolean

    Gather auto_increment columns and max values from information schema.

    gatherInnodbMetrics Boolean

    Gather metrics from INFORMATION_SCHEMA.INNODB_METRICS.

    gatherPerfEventsStatements Boolean

    Gather metrics from PERFORMANCE_SCHEMA.EVENTS_STATEMENTS_SUMMARY_BY_DIGEST.

    gatherProcessList Boolean

    Gather thread state counts from INFORMATION_SCHEMA.PROCESSLIST.

    gatherSlaveStatus Boolean

    Gather metrics from SHOW SLAVE STATUS command output.

    gatherTableIoWaits Boolean

    Gather metrics from PERFORMANCE_SCHEMA.TABLE_IO_WAITS_SUMMARY_BY_TABLE.

    gatherTableLockWaits Boolean

    Gather metrics from PERFORMANCE_SCHEMA.TABLE_LOCK_WAITS.

    gatherTableSchema Boolean

    Gather metrics from INFORMATION_SCHEMA.TABLES.

    perfEventsStatementsDigestTextLimit Number

    Truncates digest text from perf_events_statements into this many characters.

    perfEventsStatementsLimit Number

    Limits metrics from perf_events_statements.

    perfEventsStatementsTimeLimit Number

    Only include perf_events_statements whose last seen is less than this many seconds.

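    For a MySQL source, the Telegraf input plugin options above nest under source_mysql.telegraf. A hedged Python sketch with a hypothetical selection of gatherers:

    import pulumi_aiven as aiven

    # Sketch only: enable a few Telegraf MySQL gatherers and cap the number of
    # perf_events_statements metrics. The chosen options are illustrative.
    mysql_metrics_config = aiven.ServiceIntegrationMetricsUserConfigArgs(
        source_mysql=aiven.ServiceIntegrationMetricsUserConfigSourceMysqlArgs(
            telegraf=aiven.ServiceIntegrationMetricsUserConfigSourceMysqlTelegrafArgs(
                gather_process_list=True,
                gather_slave_status=True,
                gather_table_schema=True,
                perf_events_statements_limit=250,
            ),
        ),
    )
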
    Import

     $ pulumi import aiven:index/serviceIntegration:ServiceIntegration myintegration project/integration_id
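
    For example, with a hypothetical project named myproject and a hypothetical integration ID copied from the Aiven console, the command takes the form:

     $ pulumi import aiven:index/serviceIntegration:ServiceIntegration myintegration myproject/b1a2c3d4-0000-4000-8000-000000000000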
    

    Package Details

    Repository: Aiven pulumi/pulumi-aiven
    License: Apache-2.0
    Notes: This Pulumi package is based on the aiven Terraform Provider.
