aiven.FlinkJobTable
Viewing docs for Aiven v5.6.0 (Older version)
published on Monday, Mar 9, 2026 by Pulumi

    The FlinkJobTable resource allows the creation and management of Aiven Flink tables.

    Example Usage

    using System.Collections.Generic;
    using Pulumi;
    using Aiven = Pulumi.Aiven;
    
    return await Deployment.RunAsync(() => 
    {
        var table = new Aiven.FlinkJobTable("table", new()
        {
            Project = data.Aiven_project.Pr1.Project,
            ServiceName = aiven_flink.Flink.Service_name,
            TableName = "<TABLE_NAME>",
            IntegrationId = aiven_service_integration.Flink_kafka.Service_id,
            JdbcTable = "<JDBC_TABLE_NAME>",
            KafkaTopic = aiven_kafka_topic.Table_topic.Topic_name,
            SchemaSql = @"      `cpu` INT,
          `node` INT,
          `occurred_at` TIMESTAMP(3) METADATA FROM 'timestamp',
          WATERMARK FOR `occurred_at` AS `occurred_at` - INTERVAL '5' SECOND
    ",
        });
    
    });
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-aiven/sdk/v5/go/aiven"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		_, err := aiven.NewFlinkJobTable(ctx, "table", &aiven.FlinkJobTableArgs{
    			Project:       pulumi.Any(data.Aiven_project.Pr1.Project),
    			ServiceName:   pulumi.Any(aiven_flink.Flink.Service_name),
    			TableName:     pulumi.String("<TABLE_NAME>"),
    			IntegrationId: pulumi.Any(aiven_service_integration.Flink_kafka.Service_id),
    			JdbcTable:     pulumi.String("<JDBC_TABLE_NAME>"),
    			KafkaTopic:    pulumi.Any(aiven_kafka_topic.Table_topic.Topic_name),
    			SchemaSql:     pulumi.String("      `cpu` INT,\n      `node` INT,\n      `occurred_at` TIMESTAMP(3) METADATA FROM 'timestamp',\n      WATERMARK FOR `occurred_at` AS `occurred_at` - INTERVAL '5' SECOND\n"),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.aiven.FlinkJobTable;
    import com.pulumi.aiven.FlinkJobTableArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            var table = new FlinkJobTable("table", FlinkJobTableArgs.builder()        
                .project(data.aiven_project().pr1().project())
                .serviceName(aiven_flink.flink().service_name())
                .tableName("<TABLE_NAME>")
                .integrationId(aiven_service_integration.flink_kafka().service_id())
                .jdbcTable("<JDBC_TABLE_NAME>")
                .kafkaTopic(aiven_kafka_topic.table_topic().topic_name())
                .schemaSql("""
          `cpu` INT,
          `node` INT,
          `occurred_at` TIMESTAMP(3) METADATA FROM 'timestamp',
          WATERMARK FOR `occurred_at` AS `occurred_at` - INTERVAL '5' SECOND
                """)
                .build());
    
        }
    }
    
    import * as pulumi from "@pulumi/pulumi";
    import * as aiven from "@pulumi/aiven";
    
    const table = new aiven.FlinkJobTable("table", {
        project: data.aiven_project.pr1.project,
        serviceName: aiven_flink.flink.service_name,
        tableName: "<TABLE_NAME>",
        integrationId: aiven_service_integration.flink_kafka.service_id,
        jdbcTable: "<JDBC_TABLE_NAME>",
        kafkaTopic: aiven_kafka_topic.table_topic.topic_name,
        schemaSql: `      \`cpu\` INT,
          \`node\` INT,
          \`occurred_at\` TIMESTAMP(3) METADATA FROM 'timestamp',
          WATERMARK FOR \`occurred_at\` AS \`occurred_at\` - INTERVAL '5' SECOND
    `,
    });
    
    import pulumi
    import pulumi_aiven as aiven
    
    table = aiven.FlinkJobTable("table",
        project=data["aiven_project"]["pr1"]["project"],
        service_name=aiven_flink["flink"]["service_name"],
        table_name="<TABLE_NAME>",
        integration_id=aiven_service_integration["flink_kafka"]["service_id"],
        jdbc_table="<JDBC_TABLE_NAME>",
        kafka_topic=aiven_kafka_topic["table_topic"]["topic_name"],
        schema_sql="""      `cpu` INT,
          `node` INT,
          `occurred_at` TIMESTAMP(3) METADATA FROM 'timestamp',
          WATERMARK FOR `occurred_at` AS `occurred_at` - INTERVAL '5' SECOND
    """)
    
    resources:
      table:
        type: aiven:FlinkJobTable
        properties:
          project: ${data.aiven_project.pr1.project}
          serviceName: ${aiven_flink.flink.service_name}
          tableName: <TABLE_NAME>
          integrationId: ${aiven_service_integration.flink_kafka.service_id}
          # valid if the service integration refers to a postgres or mysql service
          jdbcTable: <JDBC_TABLE_NAME>
          # valid if the service integration refers to a kafka service
          kafkaTopic: ${aiven_kafka_topic.table_topic.topic_name}
          schemaSql: |2
                `cpu` INT,
                `node` INT,
                `occurred_at` TIMESTAMP(3) METADATA FROM 'timestamp',
                WATERMARK FOR `occurred_at` AS `occurred_at` - INTERVAL '5' SECOND
    

    Create FlinkJobTable Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new FlinkJobTable(name: string, args: FlinkJobTableArgs, opts?: CustomResourceOptions);
    @overload
    def FlinkJobTable(resource_name: str,
                      args: FlinkJobTableArgs,
                      opts: Optional[ResourceOptions] = None)
    
    @overload
    def FlinkJobTable(resource_name: str,
                      opts: Optional[ResourceOptions] = None,
                      project: Optional[str] = None,
                      table_name: Optional[str] = None,
                      service_name: Optional[str] = None,
                      schema_sql: Optional[str] = None,
                      integration_id: Optional[str] = None,
                      kafka_key_format: Optional[str] = None,
                      kafka_topic: Optional[str] = None,
                      kafka_value_fields_include: Optional[str] = None,
                      kafka_value_format: Optional[str] = None,
                      like_options: Optional[str] = None,
                      opensearch_index: Optional[str] = None,
                      kafka_startup_mode: Optional[str] = None,
                      kafka_key_fields: Optional[Sequence[str]] = None,
                      kafka_connector_type: Optional[str] = None,
                      jdbc_table: Optional[str] = None,
                      upsert_kafka: Optional[FlinkJobTableUpsertKafkaArgs] = None)
    func NewFlinkJobTable(ctx *Context, name string, args FlinkJobTableArgs, opts ...ResourceOption) (*FlinkJobTable, error)
    public FlinkJobTable(string name, FlinkJobTableArgs args, CustomResourceOptions? opts = null)
    public FlinkJobTable(String name, FlinkJobTableArgs args)
    public FlinkJobTable(String name, FlinkJobTableArgs args, CustomResourceOptions options)
    
    type: aiven:FlinkJobTable
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args FlinkJobTableArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args FlinkJobTableArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args FlinkJobTableArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args FlinkJobTableArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args FlinkJobTableArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Constructor example

    The following reference example uses placeholder values for all input properties.

    var flinkJobTableResource = new Aiven.FlinkJobTable("flinkJobTableResource", new()
    {
        Project = "string",
        TableName = "string",
        ServiceName = "string",
        SchemaSql = "string",
        IntegrationId = "string",
        KafkaKeyFormat = "string",
        KafkaTopic = "string",
        KafkaValueFieldsInclude = "string",
        KafkaValueFormat = "string",
        LikeOptions = "string",
        OpensearchIndex = "string",
        KafkaStartupMode = "string",
        KafkaKeyFields = new[]
        {
            "string",
        },
        KafkaConnectorType = "string",
        JdbcTable = "string",
        UpsertKafka = new Aiven.Inputs.FlinkJobTableUpsertKafkaArgs
        {
            KeyFields = new[]
            {
                "string",
            },
            KeyFormat = "string",
            ScanStartupMode = "string",
            Topic = "string",
            ValueFieldsInclude = "string",
            ValueFormat = "string",
        },
    });
    
    example, err := aiven.NewFlinkJobTable(ctx, "flinkJobTableResource", &aiven.FlinkJobTableArgs{
    	Project:                 pulumi.String("string"),
    	TableName:               pulumi.String("string"),
    	ServiceName:             pulumi.String("string"),
    	SchemaSql:               pulumi.String("string"),
    	IntegrationId:           pulumi.String("string"),
    	KafkaKeyFormat:          pulumi.String("string"),
    	KafkaTopic:              pulumi.String("string"),
    	KafkaValueFieldsInclude: pulumi.String("string"),
    	KafkaValueFormat:        pulumi.String("string"),
    	LikeOptions:             pulumi.String("string"),
    	OpensearchIndex:         pulumi.String("string"),
    	KafkaStartupMode:        pulumi.String("string"),
    	KafkaKeyFields: pulumi.StringArray{
    		pulumi.String("string"),
    	},
    	KafkaConnectorType: pulumi.String("string"),
    	JdbcTable:          pulumi.String("string"),
    	UpsertKafka: &aiven.FlinkJobTableUpsertKafkaArgs{
    		KeyFields: pulumi.StringArray{
    			pulumi.String("string"),
    		},
    		KeyFormat:          pulumi.String("string"),
    		ScanStartupMode:    pulumi.String("string"),
    		Topic:              pulumi.String("string"),
    		ValueFieldsInclude: pulumi.String("string"),
    		ValueFormat:        pulumi.String("string"),
    	},
    })
    
    var flinkJobTableResource = new FlinkJobTable("flinkJobTableResource", FlinkJobTableArgs.builder()
        .project("string")
        .tableName("string")
        .serviceName("string")
        .schemaSql("string")
        .integrationId("string")
        .kafkaKeyFormat("string")
        .kafkaTopic("string")
        .kafkaValueFieldsInclude("string")
        .kafkaValueFormat("string")
        .likeOptions("string")
        .opensearchIndex("string")
        .kafkaStartupMode("string")
        .kafkaKeyFields("string")
        .kafkaConnectorType("string")
        .jdbcTable("string")
        .upsertKafka(FlinkJobTableUpsertKafkaArgs.builder()
            .keyFields("string")
            .keyFormat("string")
            .scanStartupMode("string")
            .topic("string")
            .valueFieldsInclude("string")
            .valueFormat("string")
            .build())
        .build());
    
    flink_job_table_resource = aiven.FlinkJobTable("flinkJobTableResource",
        project="string",
        table_name="string",
        service_name="string",
        schema_sql="string",
        integration_id="string",
        kafka_key_format="string",
        kafka_topic="string",
        kafka_value_fields_include="string",
        kafka_value_format="string",
        like_options="string",
        opensearch_index="string",
        kafka_startup_mode="string",
        kafka_key_fields=["string"],
        kafka_connector_type="string",
        jdbc_table="string",
        upsert_kafka={
            "key_fields": ["string"],
            "key_format": "string",
            "scan_startup_mode": "string",
            "topic": "string",
            "value_fields_include": "string",
            "value_format": "string",
        })
    
    const flinkJobTableResource = new aiven.FlinkJobTable("flinkJobTableResource", {
        project: "string",
        tableName: "string",
        serviceName: "string",
        schemaSql: "string",
        integrationId: "string",
        kafkaKeyFormat: "string",
        kafkaTopic: "string",
        kafkaValueFieldsInclude: "string",
        kafkaValueFormat: "string",
        likeOptions: "string",
        opensearchIndex: "string",
        kafkaStartupMode: "string",
        kafkaKeyFields: ["string"],
        kafkaConnectorType: "string",
        jdbcTable: "string",
        upsertKafka: {
            keyFields: ["string"],
            keyFormat: "string",
            scanStartupMode: "string",
            topic: "string",
            valueFieldsInclude: "string",
            valueFormat: "string",
        },
    });
    
    type: aiven:FlinkJobTable
    properties:
        integrationId: string
        jdbcTable: string
        kafkaConnectorType: string
        kafkaKeyFields:
            - string
        kafkaKeyFormat: string
        kafkaStartupMode: string
        kafkaTopic: string
        kafkaValueFieldsInclude: string
        kafkaValueFormat: string
        likeOptions: string
        opensearchIndex: string
        project: string
        schemaSql: string
        serviceName: string
        tableName: string
        upsertKafka:
            keyFields:
                - string
            keyFormat: string
            scanStartupMode: string
            topic: string
            valueFieldsInclude: string
            valueFormat: string
    

    FlinkJobTable Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
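
    For example, here is a minimal sketch of the two equivalent forms for the upsert_kafka object input. The project, service, integration, and topic values are placeholder assumptions, not values taken from this page.

    import pulumi_aiven as aiven

    # Nested object passed as an argument class (FlinkJobTableUpsertKafkaArgs).
    table_args = aiven.FlinkJobTable("table-args",
        project="my-project",
        service_name="my-flink",
        integration_id="my-flink-kafka-integration-id",
        table_name="measurements",
        schema_sql="`cpu` INT, `node` INT",
        upsert_kafka=aiven.FlinkJobTableUpsertKafkaArgs(
            topic="measurements-topic",
            key_fields=["node"],
        ))

    # The same nested object passed as a dictionary literal.
    table_dict = aiven.FlinkJobTable("table-dict",
        project="my-project",
        service_name="my-flink",
        integration_id="my-flink-kafka-integration-id",
        table_name="measurements_copy",
        schema_sql="`cpu` INT, `node` INT",
        upsert_kafka={
            "topic": "measurements-topic",
            "key_fields": ["node"],
        })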

    The FlinkJobTable resource accepts the following input properties (a short sketch combining several of the Kafka-related properties follows the list):

    IntegrationId string
    The id of the service integration that is used with this table. It must have the service integration type flink. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
    Project string
    Identifies the project this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
    SchemaSql string
    The SQL statement to create the table. This property cannot be changed, doing so forces recreation of the resource.
    ServiceName string
    Specifies the name of the service that this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
    TableName string
    Specifies the name of the table. This property cannot be changed, doing so forces recreation of the resource.
    JdbcTable string
    Name of the jdbc table that is to be connected to this table. Valid if the service integration id refers to a mysql or postgres service. This property cannot be changed, doing so forces recreation of the resource.
    KafkaConnectorType string
    When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation The possible values are kafka and upsert-kafka. This property cannot be changed, doing so forces recreation of the resource.
    KafkaKeyFields List<string>
    Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed, doing so forces recreation of the resource.
    KafkaKeyFormat string
    Kafka Key Format The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed, doing so forces recreation of the resource.
    KafkaStartupMode string
    Startup mode The possible values are earliest-offset, latest-offset, group-offsets and timestamp. This property cannot be changed, doing so forces recreation of the resource.
    KafkaTopic string
    Name of the kafka topic that is to be connected to this table. Valid if the service integration id refers to a kafka service. This property cannot be changed, doing so forces recreation of the resource.
    KafkaValueFieldsInclude string
    Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are [ALL EXCEPT_KEY]. This property cannot be changed, doing so forces recreation of the resource.
    KafkaValueFormat string
    Kafka Value Format The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed, doing so forces recreation of the resource.
    LikeOptions string
    LIKE statement for table creation. This property cannot be changed, doing so forces recreation of the resource.
    OpensearchIndex string
    For an OpenSearch table, the OpenSearch index the table outputs to. This property cannot be changed, doing so forces recreation of the resource.
    UpsertKafka FlinkJobTableUpsertKafka
    Kafka upsert connector configuration.
    IntegrationId string
    The id of the service integration that is used with this table. It must have the service integration type flink. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
    Project string
    Identifies the project this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
    SchemaSql string
    The SQL statement to create the table. This property cannot be changed, doing so forces recreation of the resource.
    ServiceName string
    Specifies the name of the service that this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
    TableName string
    Specifies the name of the table. This property cannot be changed, doing so forces recreation of the resource.
    JdbcTable string
    Name of the jdbc table that is to be connected to this table. Valid if the service integration id refers to a mysql or postgres service. This property cannot be changed, doing so forces recreation of the resource.
    KafkaConnectorType string
    When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation The possible values are kafka and upsert-kafka. This property cannot be changed, doing so forces recreation of the resource.
    KafkaKeyFields []string
    Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed, doing so forces recreation of the resource.
    KafkaKeyFormat string
    Kafka Key Format The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed, doing so forces recreation of the resource.
    KafkaStartupMode string
    Startup mode The possible values are earliest-offset, latest-offset, group-offsets and timestamp. This property cannot be changed, doing so forces recreation of the resource.
    KafkaTopic string
    Name of the kafka topic that is to be connected to this table. Valid if the service integration id refers to a kafka service. This property cannot be changed, doing so forces recreation of the resource.
    KafkaValueFieldsInclude string
    Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are [ALL EXCEPT_KEY]. This property cannot be changed, doing so forces recreation of the resource.
    KafkaValueFormat string
    Kafka Value Format The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed, doing so forces recreation of the resource.
    LikeOptions string
    LIKE statement for table creation. This property cannot be changed, doing so forces recreation of the resource.
    OpensearchIndex string
    For an OpenSearch table, the OpenSearch index the table outputs to. This property cannot be changed, doing so forces recreation of the resource.
    UpsertKafka FlinkJobTableUpsertKafkaArgs
    Kafka upsert connector configuration.
    integrationId String
    The id of the service integration that is used with this table. It must have the service integration type flink. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
    project String
    Identifies the project this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
    schemaSql String
    The SQL statement to create the table. This property cannot be changed, doing so forces recreation of the resource.
    serviceName String
    Specifies the name of the service that this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
    tableName String
    Specifies the name of the table. This property cannot be changed, doing so forces recreation of the resource.
    jdbcTable String
    Name of the jdbc table that is to be connected to this table. Valid if the service integration id refers to a mysql or postgres service. This property cannot be changed, doing so forces recreation of the resource.
    kafkaConnectorType String
    When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation The possible values are kafka and upsert-kafka. This property cannot be changed, doing so forces recreation of the resource.
    kafkaKeyFields List<String>
    Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed, doing so forces recreation of the resource.
    kafkaKeyFormat String
    Kafka Key Format The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed, doing so forces recreation of the resource.
    kafkaStartupMode String
    Startup mode The possible values are earliest-offset, latest-offset, group-offsets and timestamp. This property cannot be changed, doing so forces recreation of the resource.
    kafkaTopic String
    Name of the kafka topic that is to be connected to this table. Valid if the service integration id refers to a kafka service. This property cannot be changed, doing so forces recreation of the resource.
    kafkaValueFieldsInclude String
    Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are [ALL EXCEPT_KEY]. This property cannot be changed, doing so forces recreation of the resource.
    kafkaValueFormat String
    Kafka Value Format The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed, doing so forces recreation of the resource.
    likeOptions String
    LIKE statement for table creation. This property cannot be changed, doing so forces recreation of the resource.
    opensearchIndex String
    For an OpenSearch table, the OpenSearch index the table outputs to. This property cannot be changed, doing so forces recreation of the resource.
    upsertKafka FlinkJobTableUpsertKafka
    Kafka upsert connector configuration.
    integrationId string
    The id of the service integration that is used with this table. It must have the service integration type flink. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
    project string
    Identifies the project this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
    schemaSql string
    The SQL statement to create the table. This property cannot be changed, doing so forces recreation of the resource.
    serviceName string
    Specifies the name of the service that this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
    tableName string
    Specifies the name of the table. This property cannot be changed, doing so forces recreation of the resource.
    jdbcTable string
    Name of the jdbc table that is to be connected to this table. Valid if the service integration id refers to a mysql or postgres service. This property cannot be changed, doing so forces recreation of the resource.
    kafkaConnectorType string
    When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation The possible values are kafka and upsert-kafka. This property cannot be changed, doing so forces recreation of the resource.
    kafkaKeyFields string[]
    Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed, doing so forces recreation of the resource.
    kafkaKeyFormat string
    Kafka Key Format The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed, doing so forces recreation of the resource.
    kafkaStartupMode string
    Startup mode The possible values are earliest-offset, latest-offset, group-offsets and timestamp. This property cannot be changed, doing so forces recreation of the resource.
    kafkaTopic string
    Name of the kafka topic that is to be connected to this table. Valid if the service integration id refers to a kafka service. This property cannot be changed, doing so forces recreation of the resource.
    kafkaValueFieldsInclude string
    Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are [ALL EXCEPT_KEY]. This property cannot be changed, doing so forces recreation of the resource.
    kafkaValueFormat string
    Kafka Value Format The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed, doing so forces recreation of the resource.
    likeOptions string
    LIKE statement for table creation. This property cannot be changed, doing so forces recreation of the resource.
    opensearchIndex string
    For an OpenSearch table, the OpenSearch index the table outputs to. This property cannot be changed, doing so forces recreation of the resource.
    upsertKafka FlinkJobTableUpsertKafka
    Kafka upsert connector configuration.
    integration_id str
    The id of the service integration that is used with this table. It must have the service integration type flink. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
    project str
    Identifies the project this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
    schema_sql str
    The SQL statement to create the table. This property cannot be changed, doing so forces recreation of the resource.
    service_name str
    Specifies the name of the service that this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
    table_name str
    Specifies the name of the table. This property cannot be changed, doing so forces recreation of the resource.
    jdbc_table str
    Name of the jdbc table that is to be connected to this table. Valid if the service integration id refers to a mysql or postgres service. This property cannot be changed, doing so forces recreation of the resource.
    kafka_connector_type str
    When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation The possible values are kafka and upsert-kafka. This property cannot be changed, doing so forces recreation of the resource.
    kafka_key_fields Sequence[str]
    Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed, doing so forces recreation of the resource.
    kafka_key_format str
    Kafka Key Format The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed, doing so forces recreation of the resource.
    kafka_startup_mode str
    Startup mode The possible values are earliest-offset, latest-offset, group-offsets and timestamp. This property cannot be changed, doing so forces recreation of the resource.
    kafka_topic str
    Name of the kafka topic that is to be connected to this table. Valid if the service integration id refers to a kafka service. This property cannot be changed, doing so forces recreation of the resource.
    kafka_value_fields_include str
    Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are [ALL EXCEPT_KEY]. This property cannot be changed, doing so forces recreation of the resource.
    kafka_value_format str
    Kafka Value Format The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed, doing so forces recreation of the resource.
    like_options str
    LIKE statement for table creation. This property cannot be changed, doing so forces recreation of the resource.
    opensearch_index str
    For an OpenSearch table, the OpenSearch index the table outputs to. This property cannot be changed, doing so forces recreation of the resource.
    upsert_kafka FlinkJobTableUpsertKafkaArgs
    Kafka upsert connector configuration.
    integrationId String
    The id of the service integration that is used with this table. It must have the service integration type flink. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
    project String
    Identifies the project this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
    schemaSql String
    The SQL statement to create the table. This property cannot be changed, doing so forces recreation of the resource.
    serviceName String
    Specifies the name of the service that this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
    tableName String
    Specifies the name of the table. This property cannot be changed, doing so forces recreation of the resource.
    jdbcTable String
    Name of the jdbc table that is to be connected to this table. Valid if the service integration id refers to a mysql or postgres service. This property cannot be changed, doing so forces recreation of the resource.
    kafkaConnectorType String
    When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation The possible values are kafka and upsert-kafka. This property cannot be changed, doing so forces recreation of the resource.
    kafkaKeyFields List<String>
    Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed, doing so forces recreation of the resource.
    kafkaKeyFormat String
    Kafka Key Format The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed, doing so forces recreation of the resource.
    kafkaStartupMode String
    Startup mode The possible values are earliest-offset, latest-offset, group-offsets and timestamp. This property cannot be changed, doing so forces recreation of the resource.
    kafkaTopic String
    Name of the kafka topic that is to be connected to this table. Valid if the service integration id refers to a kafka service. This property cannot be changed, doing so forces recreation of the resource.
    kafkaValueFieldsInclude String
    Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are [ALL EXCEPT_KEY]. This property cannot be changed, doing so forces recreation of the resource.
    kafkaValueFormat String
    Kafka Value Format The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed, doing so forces recreation of the resource.
    likeOptions String
    LIKE statement for table creation. This property cannot be changed, doing so forces recreation of the resource.
    opensearchIndex String
    For an OpenSearch table, the OpenSearch index the table outputs to. This property cannot be changed, doing so forces recreation of the resource.
    upsertKafka Property Map
    Kafka upsert connector configuration.
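
    As noted above, here is a minimal sketch that combines several of the Kafka-related inputs. The enumerated values (kafka, earliest-offset, json, ALL) come from the lists above; the project, service, integration, and topic names are placeholder assumptions.

    import pulumi_aiven as aiven

    # A Flink table backed by an Apache Kafka topic.
    kafka_table = aiven.FlinkJobTable("kafka-table",
        project="my-project",
        service_name="my-flink",
        integration_id="my-flink-kafka-integration-id",
        table_name="cpu_measurements",
        kafka_topic="cpu-measurements",
        kafka_connector_type="kafka",
        kafka_startup_mode="earliest-offset",
        kafka_value_format="json",
        kafka_value_fields_include="ALL",
        schema_sql="`cpu` INT, `node` INT")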

    Outputs

    All input properties are implicitly available as output properties. Additionally, the FlinkJobTable resource produces the following output properties:

    Id string
    The provider-assigned unique ID for this managed resource.
    TableId string
    The Table ID of the flink table in the flink service.
    Id string
    The provider-assigned unique ID for this managed resource.
    TableId string
    The Table ID of the flink table in the flink service.
    id String
    The provider-assigned unique ID for this managed resource.
    tableId String
    The Table ID of the flink table in the flink service.
    id string
    The provider-assigned unique ID for this managed resource.
    tableId string
    The Table ID of the flink table in the flink service.
    id str
    The provider-assigned unique ID for this managed resource.
    table_id str
    The Table ID of the flink table in the flink service.
    id String
    The provider-assigned unique ID for this managed resource.
    tableId String
    The Table ID of the flink table in the flink service.
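
    As a sketch of consuming these outputs in Python (the constructor arguments are placeholder assumptions), both the provider-assigned id and the table_id output can be exported from a program:

    import pulumi
    import pulumi_aiven as aiven

    # Placeholder arguments; a real program would reference existing resources.
    table = aiven.FlinkJobTable("example",
        project="my-project",
        service_name="my-flink",
        integration_id="my-flink-kafka-integration-id",
        table_name="measurements",
        schema_sql="`cpu` INT, `node` INT")

    # Inputs are echoed back as outputs; table_id is an additional output.
    pulumi.export("flinkTableId", table.table_id)
    pulumi.export("flinkTableResourceId", table.id)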

    Look up Existing FlinkJobTable Resource

    Get an existing FlinkJobTable resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

    public static get(name: string, id: Input<ID>, state?: FlinkJobTableState, opts?: CustomResourceOptions): FlinkJobTable
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            integration_id: Optional[str] = None,
            jdbc_table: Optional[str] = None,
            kafka_connector_type: Optional[str] = None,
            kafka_key_fields: Optional[Sequence[str]] = None,
            kafka_key_format: Optional[str] = None,
            kafka_startup_mode: Optional[str] = None,
            kafka_topic: Optional[str] = None,
            kafka_value_fields_include: Optional[str] = None,
            kafka_value_format: Optional[str] = None,
            like_options: Optional[str] = None,
            opensearch_index: Optional[str] = None,
            project: Optional[str] = None,
            schema_sql: Optional[str] = None,
            service_name: Optional[str] = None,
            table_id: Optional[str] = None,
            table_name: Optional[str] = None,
            upsert_kafka: Optional[FlinkJobTableUpsertKafkaArgs] = None) -> FlinkJobTable
    func GetFlinkJobTable(ctx *Context, name string, id IDInput, state *FlinkJobTableState, opts ...ResourceOption) (*FlinkJobTable, error)
    public static FlinkJobTable Get(string name, Input<string> id, FlinkJobTableState? state, CustomResourceOptions? opts = null)
    public static FlinkJobTable get(String name, Output<String> id, FlinkJobTableState state, CustomResourceOptions options)
    resources:
      _:
        type: aiven:FlinkJobTable
        get:
          id: ${id}
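
    For instance, a minimal Python sketch of the lookup (the ID value is a placeholder; no new infrastructure is created):

    import pulumi
    import pulumi_aiven as aiven

    # Adopt the state of an already-provisioned table by its provider ID.
    existing = aiven.FlinkJobTable.get("existing-table", id="<FLINK_TABLE_ID>")

    pulumi.export("existingTableName", existing.table_name)
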
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    IntegrationId string
    The id of the service integration that is used with this table. It must have the service integration type flink. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
    JdbcTable string
    Name of the jdbc table that is to be connected to this table. Valid if the service integration id refers to a mysql or postgres service. This property cannot be changed, doing so forces recreation of the resource.
    KafkaConnectorType string
    When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation The possible values are kafka and upsert-kafka. This property cannot be changed, doing so forces recreation of the resource.
    KafkaKeyFields List<string>
    Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed, doing so forces recreation of the resource.
    KafkaKeyFormat string
    Kafka Key Format The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed, doing so forces recreation of the resource.
    KafkaStartupMode string
    Startup mode The possible values are earliest-offset, latest-offset, group-offsets and timestamp. This property cannot be changed, doing so forces recreation of the resource.
    KafkaTopic string
    Name of the kafka topic that is to be connected to this table. Valid if the service integration id refers to a kafka service. This property cannot be changed, doing so forces recreation of the resource.
    KafkaValueFieldsInclude string
    Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are [ALL EXCEPT_KEY]. This property cannot be changed, doing so forces recreation of the resource.
    KafkaValueFormat string
    Kafka Value Format The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed, doing so forces recreation of the resource.
    LikeOptions string
    LIKE statement for table creation. This property cannot be changed, doing so forces recreation of the resource.
    OpensearchIndex string
    For an OpenSearch table, the OpenSearch index the table outputs to. This property cannot be changed, doing so forces recreation of the resource.
    Project string
    Identifies the project this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
    SchemaSql string
    The SQL statement to create the table. This property cannot be changed, doing so forces recreation of the resource.
    ServiceName string
    Specifies the name of the service that this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
    TableId string
    The Table ID of the flink table in the flink service.
    TableName string
    Specifies the name of the table. This property cannot be changed, doing so forces recreation of the resource.
    UpsertKafka FlinkJobTableUpsertKafka
    Kafka upsert connector configuration.
    IntegrationId string
    The id of the service integration that is used with this table. It must have the service integration type flink. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
    JdbcTable string
    Name of the jdbc table that is to be connected to this table. Valid if the service integration id refers to a mysql or postgres service. This property cannot be changed, doing so forces recreation of the resource.
    KafkaConnectorType string
    When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation The possible values are kafka and upsert-kafka. This property cannot be changed, doing so forces recreation of the resource.
    KafkaKeyFields []string
    Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed, doing so forces recreation of the resource.
    KafkaKeyFormat string
    Kafka Key Format The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed, doing so forces recreation of the resource.
    KafkaStartupMode string
    Startup mode The possible values are earliest-offset, latest-offset, group-offsets and timestamp. This property cannot be changed, doing so forces recreation of the resource.
    KafkaTopic string
    Name of the kafka topic that is to be connected to this table. Valid if the service integration id refers to a kafka service. This property cannot be changed, doing so forces recreation of the resource.
    KafkaValueFieldsInclude string
    Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are [ALL EXCEPT_KEY]. This property cannot be changed, doing so forces recreation of the resource.
    KafkaValueFormat string
    Kafka Value Format The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed, doing so forces recreation of the resource.
    LikeOptions string
    LIKE statement for table creation. This property cannot be changed, doing so forces recreation of the resource.
    OpensearchIndex string
    For an OpenSearch table, the OpenSearch index the table outputs to. This property cannot be changed, doing so forces recreation of the resource.
    Project string
    Identifies the project this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
    SchemaSql string
    The SQL statement to create the table. This property cannot be changed, doing so forces recreation of the resource.
    ServiceName string
    Specifies the name of the service that this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
    TableId string
    The Table ID of the flink table in the flink service.
    TableName string
    Specifies the name of the table. This property cannot be changed, doing so forces recreation of the resource.
    UpsertKafka FlinkJobTableUpsertKafkaArgs
    Kafka upsert connector configuration.
    integrationId String
    The id of the service integration that is used with this table. It must have the service integration type flink. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
    jdbcTable String
    Name of the jdbc table that is to be connected to this table. Valid if the service integration id refers to a mysql or postgres service. This property cannot be changed, doing so forces recreation of the resource.
    kafkaConnectorType String
    When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation The possible values are kafka and upsert-kafka. This property cannot be changed, doing so forces recreation of the resource.
    kafkaKeyFields List<String>
    Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed, doing so forces recreation of the resource.
    kafkaKeyFormat String
    Kafka Key Format The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed, doing so forces recreation of the resource.
    kafkaStartupMode String
    Startup mode The possible values are earliest-offset, latest-offset, group-offsets and timestamp. This property cannot be changed, doing so forces recreation of the resource.
    kafkaTopic String
    Name of the kafka topic that is to be connected to this table. Valid if the service integration id refers to a kafka service. This property cannot be changed, doing so forces recreation of the resource.
    kafkaValueFieldsInclude String
    Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are [ALL EXCEPT_KEY]. This property cannot be changed, doing so forces recreation of the resource.
    kafkaValueFormat String
    Kafka Value Format The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed, doing so forces recreation of the resource.
    likeOptions String
    LIKE statement for table creation. This property cannot be changed, doing so forces recreation of the resource.
    opensearchIndex String
    For an OpenSearch table, the OpenSearch index the table outputs to. This property cannot be changed, doing so forces recreation of the resource.
    project String
    Identifies the project this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
    schemaSql String
    The SQL statement to create the table. This property cannot be changed, doing so forces recreation of the resource.
    serviceName String
    Specifies the name of the service that this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
    tableId String
    The Table ID of the flink table in the flink service.
    tableName String
    Specifies the name of the table. This property cannot be changed, doing so forces recreation of the resource.
    upsertKafka FlinkJobTableUpsertKafka
    Kafka upsert connector configuration.
    integrationId string
    The id of the service integration that is used with this table. It must have the service integration type flink. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
    jdbcTable string
    Name of the jdbc table that is to be connected to this table. Valid if the service integration id refers to a mysql or postgres service. This property cannot be changed, doing so forces recreation of the resource.
    kafkaConnectorType string
    When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation The possible values are kafka and upsert-kafka. This property cannot be changed, doing so forces recreation of the resource.
    kafkaKeyFields string[]
    Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed, doing so forces recreation of the resource.
    kafkaKeyFormat string
    Kafka Key Format The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed, doing so forces recreation of the resource.
    kafkaStartupMode string
    Startup mode The possible values are earliest-offset, latest-offset, group-offsets and timestamp. This property cannot be changed, doing so forces recreation of the resource.
    kafkaTopic string
    Name of the kafka topic that is to be connected to this table. Valid if the service integration id refers to a kafka service. This property cannot be changed, doing so forces recreation of the resource.
    kafkaValueFieldsInclude string
    Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are [ALL EXCEPT_KEY]. This property cannot be changed, doing so forces recreation of the resource.
    kafkaValueFormat string
    Kafka Value Format The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed, doing so forces recreation of the resource.
    likeOptions string
    LIKE statement for table creation. This property cannot be changed, doing so forces recreation of the resource.
    opensearchIndex string
    For an OpenSearch table, the OpenSearch index the table outputs to. This property cannot be changed, doing so forces recreation of the resource.
    project string
    Identifies the project this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
    schemaSql string
    The SQL statement to create the table. This property cannot be changed, doing so forces recreation of the resource.
    serviceName string
    Specifies the name of the service that this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
    tableId string
    The Table ID of the flink table in the flink service.
    tableName string
    Specifies the name of the table. This property cannot be changed, doing so forces recreation of the resource.
    upsertKafka FlinkJobTableUpsertKafka
    Kafka upsert connector configuration.
    integration_id str
    The ID of the service integration that is used with this table. It must have the service integration type flink. To set up proper dependencies, please refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.
    jdbc_table str
    Name of the JDBC table that is to be connected to this table. Valid if the service integration ID refers to a MySQL or PostgreSQL service. This property cannot be changed; doing so forces recreation of the resource.
    kafka_connector_type str
    When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation. The possible values are kafka and upsert-kafka. This property cannot be changed; doing so forces recreation of the resource.
    kafka_key_fields Sequence[str]
    Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed; doing so forces recreation of the resource.
    kafka_key_format str
    Kafka key format. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed; doing so forces recreation of the resource.
    kafka_startup_mode str
    Startup mode. The possible values are earliest-offset, latest-offset, group-offsets and timestamp. This property cannot be changed; doing so forces recreation of the resource.
    kafka_topic str
    Name of the Kafka topic that is to be connected to this table. Valid if the service integration ID refers to a Kafka service. This property cannot be changed; doing so forces recreation of the resource.
    kafka_value_fields_include str
    Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are ALL and EXCEPT_KEY. This property cannot be changed; doing so forces recreation of the resource.
    kafka_value_format str
    Kafka value format. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed; doing so forces recreation of the resource.
    like_options str
    LIKE statement for table creation. This property cannot be changed; doing so forces recreation of the resource.
    opensearch_index str
    For an OpenSearch table, the OpenSearch index the table outputs to. This property cannot be changed; doing so forces recreation of the resource.
    project str
    Identifies the project this resource belongs to. To set up proper dependencies, please refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.
    schema_sql str
    The SQL statement to create the table. This property cannot be changed; doing so forces recreation of the resource.
    service_name str
    Specifies the name of the service that this resource belongs to. To set up proper dependencies, please refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.
    table_id str
    The table ID of the Flink table in the Flink service.
    table_name str
    Specifies the name of the table. This property cannot be changed; doing so forces recreation of the resource.
    upsert_kafka FlinkJobTableUpsertKafkaArgs
    Kafka upsert connector configuration.
    integrationId String
    The ID of the service integration that is used with this table. It must have the service integration type flink. To set up proper dependencies, please refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.
    jdbcTable String
    Name of the JDBC table that is to be connected to this table. Valid if the service integration ID refers to a MySQL or PostgreSQL service. This property cannot be changed; doing so forces recreation of the resource.
    kafkaConnectorType String
    When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation. The possible values are kafka and upsert-kafka. This property cannot be changed; doing so forces recreation of the resource.
    kafkaKeyFields List<String>
    Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed; doing so forces recreation of the resource.
    kafkaKeyFormat String
    Kafka key format. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed; doing so forces recreation of the resource.
    kafkaStartupMode String
    Startup mode. The possible values are earliest-offset, latest-offset, group-offsets and timestamp. This property cannot be changed; doing so forces recreation of the resource.
    kafkaTopic String
    Name of the Kafka topic that is to be connected to this table. Valid if the service integration ID refers to a Kafka service. This property cannot be changed; doing so forces recreation of the resource.
    kafkaValueFieldsInclude String
    Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are ALL and EXCEPT_KEY. This property cannot be changed; doing so forces recreation of the resource.
    kafkaValueFormat String
    Kafka value format. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed; doing so forces recreation of the resource.
    likeOptions String
    LIKE statement for table creation. This property cannot be changed; doing so forces recreation of the resource.
    opensearchIndex String
    For an OpenSearch table, the OpenSearch index the table outputs to. This property cannot be changed; doing so forces recreation of the resource.
    project String
    Identifies the project this resource belongs to. To set up proper dependencies, please refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.
    schemaSql String
    The SQL statement to create the table. This property cannot be changed; doing so forces recreation of the resource.
    serviceName String
    Specifies the name of the service that this resource belongs to. To set up proper dependencies, please refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.
    tableId String
    The table ID of the Flink table in the Flink service.
    tableName String
    Specifies the name of the table. This property cannot be changed; doing so forces recreation of the resource.
    upsertKafka Property Map
    Kafka upsert connector configuration.
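
    As a rough illustration of how the properties above fit together, here is a minimal TypeScript sketch of an upsert-kafka table. It is only a sketch: the project, service, integration ID, topic, and schema values below are placeholders, not values taken from this page.

    import * as aiven from "@pulumi/aiven";

    // Minimal sketch of an upsert-kafka table; every identifier below is a placeholder.
    const metricsTable = new aiven.FlinkJobTable("metrics", {
        project: "my-project",                      // changing this forces recreation
        serviceName: "my-flink",                    // the Aiven for Apache Flink service
        integrationId: "00000000-0000-0000-0000-000000000000", // flink-type service integration
        tableName: "metrics",
        kafkaConnectorType: "upsert-kafka",         // or "kafka" for the plain connector
        kafkaTopic: "metrics-compacted",
        kafkaKeyFormat: "json",
        kafkaValueFormat: "json",
        kafkaValueFieldsInclude: "EXCEPT_KEY",      // keep key columns out of the message value
        schemaSql: "`node` INT,\n`cpu` DOUBLE,\nPRIMARY KEY (`node`) NOT ENFORCED",
    });

    Because every property above is create-only, changing any of them in a sketch like this replaces the table rather than updating it in place.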

    Supporting Types

    FlinkJobTableUpsertKafka, FlinkJobTableUpsertKafkaArgs

    KeyFields List<string>
    Defines the columns from the SQL schema of the data table that are considered keys in the Kafka messages. This property cannot be changed; doing so forces recreation of the resource. A configuration sketch follows these lists.
    KeyFormat string
    Sets the format that is used to convert the key part of Kafka messages. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed; doing so forces recreation of the resource.
    ScanStartupMode string
    Controls the startup method for the Kafka consumer that Aiven for Apache Flink is using. The possible values are earliest-offset, latest-offset, group-offsets and timestamp. This property cannot be changed; doing so forces recreation of the resource.
    Topic string
    Topic name. This property cannot be changed; doing so forces recreation of the resource.
    ValueFieldsInclude string
    Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are ALL and EXCEPT_KEY. This property cannot be changed; doing so forces recreation of the resource.
    ValueFormat string
    Sets the format that is used to convert the value part of Kafka messages. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed; doing so forces recreation of the resource.
    KeyFields []string
    Defines the columns from the SQL schema of the data table that are considered keys in the Kafka messages. This property cannot be changed; doing so forces recreation of the resource.
    KeyFormat string
    Sets the format that is used to convert the key part of Kafka messages. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed; doing so forces recreation of the resource.
    ScanStartupMode string
    Controls the startup method for the Kafka consumer that Aiven for Apache Flink is using. The possible values are earliest-offset, latest-offset, group-offsets and timestamp. This property cannot be changed; doing so forces recreation of the resource.
    Topic string
    Topic name. This property cannot be changed; doing so forces recreation of the resource.
    ValueFieldsInclude string
    Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are ALL and EXCEPT_KEY. This property cannot be changed; doing so forces recreation of the resource.
    ValueFormat string
    Sets the format that is used to convert the value part of Kafka messages. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed; doing so forces recreation of the resource.
    keyFields List<String>
    Defines the columns from the SQL schema of the data table that are considered keys in the Kafka messages. This property cannot be changed; doing so forces recreation of the resource.
    keyFormat String
    Sets the format that is used to convert the key part of Kafka messages. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed; doing so forces recreation of the resource.
    scanStartupMode String
    Controls the startup method for the Kafka consumer that Aiven for Apache Flink is using. The possible values are earliest-offset, latest-offset, group-offsets and timestamp. This property cannot be changed; doing so forces recreation of the resource.
    topic String
    Topic name. This property cannot be changed; doing so forces recreation of the resource.
    valueFieldsInclude String
    Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are ALL and EXCEPT_KEY. This property cannot be changed; doing so forces recreation of the resource.
    valueFormat String
    Sets the format that is used to convert the value part of Kafka messages. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed; doing so forces recreation of the resource.
    keyFields string[]
    Defines the columns from the SQL schema of the data table that are considered keys in the Kafka messages. This property cannot be changed; doing so forces recreation of the resource.
    keyFormat string
    Sets the format that is used to convert the key part of Kafka messages. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed; doing so forces recreation of the resource.
    scanStartupMode string
    Controls the startup method for the Kafka consumer that Aiven for Apache Flink is using. The possible values are earliest-offset, latest-offset, group-offsets and timestamp. This property cannot be changed; doing so forces recreation of the resource.
    topic string
    Topic name. This property cannot be changed; doing so forces recreation of the resource.
    valueFieldsInclude string
    Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are ALL and EXCEPT_KEY. This property cannot be changed; doing so forces recreation of the resource.
    valueFormat string
    Sets the format that is used to convert the value part of Kafka messages. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed; doing so forces recreation of the resource.
    key_fields Sequence[str]
    Defines the columns from the SQL schema of the data table that are considered keys in the Kafka messages. This property cannot be changed; doing so forces recreation of the resource.
    key_format str
    Sets the format that is used to convert the key part of Kafka messages. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed; doing so forces recreation of the resource.
    scan_startup_mode str
    Controls the startup method for the Kafka consumer that Aiven for Apache Flink is using. The possible values are earliest-offset, latest-offset, group-offsets and timestamp. This property cannot be changed; doing so forces recreation of the resource.
    topic str
    Topic name. This property cannot be changed; doing so forces recreation of the resource.
    value_fields_include str
    Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are ALL and EXCEPT_KEY. This property cannot be changed; doing so forces recreation of the resource.
    value_format str
    Sets the format that is used to convert the value part of Kafka messages. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed; doing so forces recreation of the resource.
    keyFields List<String>
    Defines the columns from the SQL schema of the data table that are considered keys in the Kafka messages. This property cannot be changed; doing so forces recreation of the resource.
    keyFormat String
    Sets the format that is used to convert the key part of Kafka messages. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed; doing so forces recreation of the resource.
    scanStartupMode String
    Controls the startup method for the Kafka consumer that Aiven for Apache Flink is using. The possible values are earliest-offset, latest-offset, group-offsets and timestamp. This property cannot be changed; doing so forces recreation of the resource.
    topic String
    Topic name. This property cannot be changed; doing so forces recreation of the resource.
    valueFieldsInclude String
    Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are ALL and EXCEPT_KEY. This property cannot be changed; doing so forces recreation of the resource.
    valueFormat String
    Sets the format that is used to convert the value part of Kafka messages. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed; doing so forces recreation of the resource.
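
    The same options can also be set through the nested upsertKafka block instead of the flat kafka* properties. The TypeScript sketch below only illustrates the shape of that block; the project, service, integration ID, topic, and column names are placeholders.

    import * as aiven from "@pulumi/aiven";

    // Sketch of the nested upsert-kafka configuration; all identifiers are placeholders.
    const stateTable = new aiven.FlinkJobTable("state", {
        project: "my-project",
        serviceName: "my-flink",
        integrationId: "00000000-0000-0000-0000-000000000000",
        tableName: "device_state",
        schemaSql: "`device_id` STRING,\n`state` STRING,\nPRIMARY KEY (`device_id`) NOT ENFORCED",
        upsertKafka: {
            topic: "device-state",
            keyFields: ["device_id"],            // columns treated as the Kafka message key
            keyFormat: "json",
            valueFormat: "json",
            valueFieldsInclude: "EXCEPT_KEY",    // the default for upsert connectors
            scanStartupMode: "earliest-offset",
        },
    });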

    Import

     $ pulumi import aiven:index/flinkJobTable:FlinkJobTable table project/service_name/table_id
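
    For example, with a hypothetical project my-project, Flink service my-flink, and table ID 1a2b3c4d, the command would look like this (all values are placeholders):

     $ pulumi import aiven:index/flinkJobTable:FlinkJobTable table my-project/my-flink/1a2b3c4d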
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    Aiven pulumi/pulumi-aiven
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the aiven Terraform Provider.