published on Monday, Mar 9, 2026 by Pulumi
The FlinkJobTable resource allows the creation and management of Aiven Flink tables.
Example Usage
using System.Collections.Generic;
using Pulumi;
using Aiven = Pulumi.Aiven;

return await Deployment.RunAsync(() =>
{
    // Flink table bound to a Kafka topic (or a JDBC table, depending on which
    // service the integration refers to), with a watermark derived from the
    // Kafka record timestamp.
    var table = new Aiven.FlinkJobTable("table", new()
    {
        Project = data.Aiven_project.Pr1.Project,
        ServiceName = aiven_flink.Flink.Service_name,
        TableName = "<TABLE_NAME>",
        IntegrationId = aiven_service_integration.Flink_kafka.Service_id,
        JdbcTable = "<JDBC_TABLE_NAME>",
        KafkaTopic = aiven_kafka_topic.Table_topic.Topic_name,
        SchemaSql = @" `cpu` INT,
    `node` INT,
    `occurred_at` TIMESTAMP(3) METADATA FROM 'timestamp',
    WATERMARK FOR `occurred_at` AS `occurred_at` - INTERVAL '5' SECOND
",
    });
});
package main
import (
"github.com/pulumi/pulumi-aiven/sdk/v5/go/aiven"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := aiven.NewFlinkJobTable(ctx, "table", &aiven.FlinkJobTableArgs{
Project: pulumi.Any(data.Aiven_project.Pr1.Project),
ServiceName: pulumi.Any(aiven_flink.Flink.Service_name),
TableName: pulumi.String("<TABLE_NAME>"),
IntegrationId: pulumi.Any(aiven_service_integration.Flink_kafka.Service_id),
JdbcTable: pulumi.String("<JDBC_TABLE_NAME>"),
KafkaTopic: pulumi.Any(aiven_kafka_topic.Table_topic.Topic_name),
SchemaSql: pulumi.String(" `+\"`cpu`\"+` INT,\n `+\"`node`\"+` INT,\n `+\"`occurred_at`\"+` TIMESTAMP(3) METADATA FROM 'timestamp',\n WATERMARK FOR `+\"`occurred_at`\"+` AS `+\"`occurred_at`\"+` - INTERVAL '5' SECOND\n"),
})
if err != nil {
return err
}
return nil
})
}
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.aiven.FlinkJobTable;
import com.pulumi.aiven.FlinkJobTableArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // Flink table bound to a Kafka topic through the Flink service
        // integration, with a watermark on the Kafka record timestamp.
        var table = new FlinkJobTable("table", FlinkJobTableArgs.builder()
            .project(data.aiven_project().pr1().project())
            .serviceName(aiven_flink.flink().service_name())
            .tableName("<TABLE_NAME>")
            .integrationId(aiven_service_integration.flink_kafka().service_id())
            .jdbcTable("<JDBC_TABLE_NAME>")
            .kafkaTopic(aiven_kafka_topic.table_topic().topic_name())
            .schemaSql("""
                `cpu` INT,
                `node` INT,
                `occurred_at` TIMESTAMP(3) METADATA FROM 'timestamp',
                WATERMARK FOR `occurred_at` AS `occurred_at` - INTERVAL '5' SECOND
                """)
            .build());
    }
}
import * as pulumi from "@pulumi/pulumi";
import * as aiven from "@pulumi/aiven";

// Flink table bound to a Kafka topic through the Flink service integration,
// with a watermark on the Kafka record timestamp.
const table = new aiven.FlinkJobTable("table", {
    project: data.aiven_project.pr1.project,
    serviceName: aiven_flink.flink.service_name,
    tableName: "<TABLE_NAME>",
    integrationId: aiven_service_integration.flink_kafka.service_id,
    jdbcTable: "<JDBC_TABLE_NAME>",
    kafkaTopic: aiven_kafka_topic.table_topic.topic_name,
    schemaSql: ` \`cpu\` INT,
 \`node\` INT,
 \`occurred_at\` TIMESTAMP(3) METADATA FROM 'timestamp',
 WATERMARK FOR \`occurred_at\` AS \`occurred_at\` - INTERVAL '5' SECOND
`,
});
import pulumi
import pulumi_aiven as aiven

# Flink table bound to a Kafka topic through the Flink service integration,
# with a watermark on the Kafka record timestamp.
table = aiven.FlinkJobTable("table",
    project=data["aiven_project"]["pr1"]["project"],
    service_name=aiven_flink["flink"]["service_name"],
    table_name="<TABLE_NAME>",
    integration_id=aiven_service_integration["flink_kafka"]["service_id"],
    jdbc_table="<JDBC_TABLE_NAME>",
    kafka_topic=aiven_kafka_topic["table_topic"]["topic_name"],
    schema_sql=""" `cpu` INT,
 `node` INT,
 `occurred_at` TIMESTAMP(3) METADATA FROM 'timestamp',
 WATERMARK FOR `occurred_at` AS `occurred_at` - INTERVAL '5' SECOND
""")
resources:
  table:
    type: aiven:FlinkJobTable
    properties:
      project: ${data.aiven_project.pr1.project}
      serviceName: ${aiven_flink.flink.service_name}
      tableName: <TABLE_NAME>
      integrationId: ${aiven_service_integration.flink_kafka.service_id}
      # valid if the service integration refers to a postgres or mysql service
      jdbcTable: <JDBC_TABLE_NAME>
      # valid if the service integration refers to a kafka service
      kafkaTopic: ${aiven_kafka_topic.table_topic.topic_name}
      schemaSql: |
        `cpu` INT,
        `node` INT,
        `occurred_at` TIMESTAMP(3) METADATA FROM 'timestamp',
        WATERMARK FOR `occurred_at` AS `occurred_at` - INTERVAL '5' SECOND
Create FlinkJobTable Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new FlinkJobTable(name: string, args: FlinkJobTableArgs, opts?: CustomResourceOptions);@overload
def FlinkJobTable(resource_name: str,
args: FlinkJobTableArgs,
opts: Optional[ResourceOptions] = None)
@overload
def FlinkJobTable(resource_name: str,
opts: Optional[ResourceOptions] = None,
project: Optional[str] = None,
table_name: Optional[str] = None,
service_name: Optional[str] = None,
schema_sql: Optional[str] = None,
integration_id: Optional[str] = None,
kafka_key_format: Optional[str] = None,
kafka_topic: Optional[str] = None,
kafka_value_fields_include: Optional[str] = None,
kafka_value_format: Optional[str] = None,
like_options: Optional[str] = None,
opensearch_index: Optional[str] = None,
kafka_startup_mode: Optional[str] = None,
kafka_key_fields: Optional[Sequence[str]] = None,
kafka_connector_type: Optional[str] = None,
jdbc_table: Optional[str] = None,
upsert_kafka: Optional[FlinkJobTableUpsertKafkaArgs] = None)func NewFlinkJobTable(ctx *Context, name string, args FlinkJobTableArgs, opts ...ResourceOption) (*FlinkJobTable, error)public FlinkJobTable(string name, FlinkJobTableArgs args, CustomResourceOptions? opts = null)
public FlinkJobTable(String name, FlinkJobTableArgs args)
public FlinkJobTable(String name, FlinkJobTableArgs args, CustomResourceOptions options)
type: aiven:FlinkJobTable
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args FlinkJobTableArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args FlinkJobTableArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args FlinkJobTableArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args FlinkJobTableArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args FlinkJobTableArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
// Reference example: every input property shown with a placeholder value.
var flinkJobTableResource = new Aiven.FlinkJobTable("flinkJobTableResource", new()
{
Project = "string",
TableName = "string",
ServiceName = "string",
SchemaSql = "string",
IntegrationId = "string",
KafkaKeyFormat = "string",
KafkaTopic = "string",
KafkaValueFieldsInclude = "string",
KafkaValueFormat = "string",
LikeOptions = "string",
OpensearchIndex = "string",
KafkaStartupMode = "string",
KafkaKeyFields = new[]
{
"string",
},
KafkaConnectorType = "string",
JdbcTable = "string",
UpsertKafka = new Aiven.Inputs.FlinkJobTableUpsertKafkaArgs
{
KeyFields = new[]
{
"string",
},
KeyFormat = "string",
ScanStartupMode = "string",
Topic = "string",
ValueFieldsInclude = "string",
ValueFormat = "string",
},
});
// Reference example: every input property shown with a placeholder value.
example, err := aiven.NewFlinkJobTable(ctx, "flinkJobTableResource", &aiven.FlinkJobTableArgs{
Project: pulumi.String("string"),
TableName: pulumi.String("string"),
ServiceName: pulumi.String("string"),
SchemaSql: pulumi.String("string"),
IntegrationId: pulumi.String("string"),
KafkaKeyFormat: pulumi.String("string"),
KafkaTopic: pulumi.String("string"),
KafkaValueFieldsInclude: pulumi.String("string"),
KafkaValueFormat: pulumi.String("string"),
LikeOptions: pulumi.String("string"),
OpensearchIndex: pulumi.String("string"),
KafkaStartupMode: pulumi.String("string"),
KafkaKeyFields: pulumi.StringArray{
pulumi.String("string"),
},
KafkaConnectorType: pulumi.String("string"),
JdbcTable: pulumi.String("string"),
UpsertKafka: &aiven.FlinkJobTableUpsertKafkaArgs{
KeyFields: pulumi.StringArray{
pulumi.String("string"),
},
KeyFormat: pulumi.String("string"),
ScanStartupMode: pulumi.String("string"),
Topic: pulumi.String("string"),
ValueFieldsInclude: pulumi.String("string"),
ValueFormat: pulumi.String("string"),
},
})
// Reference example: every input property shown with a placeholder value.
var flinkJobTableResource = new FlinkJobTable("flinkJobTableResource", FlinkJobTableArgs.builder()
.project("string")
.tableName("string")
.serviceName("string")
.schemaSql("string")
.integrationId("string")
.kafkaKeyFormat("string")
.kafkaTopic("string")
.kafkaValueFieldsInclude("string")
.kafkaValueFormat("string")
.likeOptions("string")
.opensearchIndex("string")
.kafkaStartupMode("string")
.kafkaKeyFields("string")
.kafkaConnectorType("string")
.jdbcTable("string")
.upsertKafka(FlinkJobTableUpsertKafkaArgs.builder()
.keyFields("string")
.keyFormat("string")
.scanStartupMode("string")
.topic("string")
.valueFieldsInclude("string")
.valueFormat("string")
.build())
.build());
# Reference example: every input property shown with a placeholder value.
flink_job_table_resource = aiven.FlinkJobTable("flinkJobTableResource",
    project="string",
    table_name="string",
    service_name="string",
    schema_sql="string",
    integration_id="string",
    kafka_key_format="string",
    kafka_topic="string",
    kafka_value_fields_include="string",
    kafka_value_format="string",
    like_options="string",
    opensearch_index="string",
    kafka_startup_mode="string",
    kafka_key_fields=["string"],
    kafka_connector_type="string",
    jdbc_table="string",
    upsert_kafka={
        "key_fields": ["string"],
        "key_format": "string",
        "scan_startup_mode": "string",
        "topic": "string",
        "value_fields_include": "string",
        "value_format": "string",
    })
// Reference example: every input property shown with a placeholder value.
const flinkJobTableResource = new aiven.FlinkJobTable("flinkJobTableResource", {
    project: "string",
    tableName: "string",
    serviceName: "string",
    schemaSql: "string",
    integrationId: "string",
    kafkaKeyFormat: "string",
    kafkaTopic: "string",
    kafkaValueFieldsInclude: "string",
    kafkaValueFormat: "string",
    likeOptions: "string",
    opensearchIndex: "string",
    kafkaStartupMode: "string",
    kafkaKeyFields: ["string"],
    kafkaConnectorType: "string",
    jdbcTable: "string",
    upsertKafka: {
        keyFields: ["string"],
        keyFormat: "string",
        scanStartupMode: "string",
        topic: "string",
        valueFieldsInclude: "string",
        valueFormat: "string",
    },
});
# Reference example: every input property shown with a placeholder value.
type: aiven:FlinkJobTable
properties:
    integrationId: string
    jdbcTable: string
    kafkaConnectorType: string
    kafkaKeyFields:
        - string
    kafkaKeyFormat: string
    kafkaStartupMode: string
    kafkaTopic: string
    kafkaValueFieldsInclude: string
    kafkaValueFormat: string
    likeOptions: string
    opensearchIndex: string
    project: string
    schemaSql: string
    serviceName: string
    tableName: string
    upsertKafka:
        keyFields:
            - string
        keyFormat: string
        scanStartupMode: string
        topic: string
        valueFieldsInclude: string
        valueFormat: string
FlinkJobTable Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The FlinkJobTable resource accepts the following input properties:
- Integration
Id string - The id of the service integration that is used with this table. It must have the service integration type
flink. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource. - Project string
- Identifies the project this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
- Schema
Sql string - The SQL statement to create the table. This property cannot be changed, doing so forces recreation of the resource.
- Service
Name string - Specifies the name of the service that this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
- Table
Name string - Specifies the name of the table. This property cannot be changed, doing so forces recreation of the resource.
- Jdbc
Table string - Name of the jdbc table that is to be connected to this table. Valid if the service integration id refers to a mysql or postgres service. This property cannot be changed, doing so forces recreation of the resource.
- Kafka
Connector stringType - When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation. The possible values are
kafka and upsert-kafka. This property cannot be changed, doing so forces recreation of the resource. - Kafka
Key List<string>Fields - Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed, doing so forces recreation of the resource.
- Kafka
Key stringFormat - Kafka Key Format The possible values are
avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed, doing so forces recreation of the resource. - Kafka
Startup stringMode - Startup mode The possible values are
earliest-offset, latest-offset, group-offsets and timestamp. This property cannot be changed, doing so forces recreation of the resource. - Kafka
Topic string - Name of the kafka topic that is to be connected to this table. Valid if the service integration id refers to a kafka service. This property cannot be changed, doing so forces recreation of the resource.
- Kafka
Value stringFields Include - Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are
ALL and EXCEPT_KEY. This property cannot be changed, doing so forces recreation of the resource. - Kafka
Value stringFormat - Kafka Value Format The possible values are
avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed, doing so forces recreation of the resource. - Like
Options string - LIKE statement for table creation. This property cannot be changed, doing so forces recreation of the resource.
- Opensearch
Index string - For an OpenSearch table, the OpenSearch index the table outputs to. This property cannot be changed, doing so forces recreation of the resource.
- Upsert
Kafka FlinkJob Table Upsert Kafka - Kafka upsert connector configuration.
- Integration
Id string - The id of the service integration that is used with this table. It must have the service integration type
flink. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource. - Project string
- Identifies the project this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
- Schema
Sql string - The SQL statement to create the table. This property cannot be changed, doing so forces recreation of the resource.
- Service
Name string - Specifies the name of the service that this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
- Table
Name string - Specifies the name of the table. This property cannot be changed, doing so forces recreation of the resource.
- Jdbc
Table string - Name of the jdbc table that is to be connected to this table. Valid if the service integration id refers to a mysql or postgres service. This property cannot be changed, doing so forces recreation of the resource.
- Kafka
Connector stringType - When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation The possible values are
kafkaandupsert-kafka. This property cannot be changed, doing so forces recreation of the resource. - Kafka
Key []stringFields - Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed, doing so forces recreation of the resource.
- Kafka
Key stringFormat - Kafka Key Format The possible values are
avro,avro-confluent,debezium-avro-confluent,debezium-jsonandjson. This property cannot be changed, doing so forces recreation of the resource. - Kafka
Startup stringMode - Startup mode The possible values are
earliest-offset,latest-offset,group-offsetsandtimestamp. This property cannot be changed, doing so forces recreation of the resource. - Kafka
Topic string - Name of the kafka topic that is to be connected to this table. Valid if the service integration id refers to a kafka service. This property cannot be changed, doing so forces recreation of the resource.
- Kafka
Value stringFields Include - Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are
[ALL EXCEPT_KEY]. This property cannot be changed, doing so forces recreation of the resource. - Kafka
Value stringFormat - Kafka Value Format The possible values are
avro,avro-confluent,debezium-avro-confluent,debezium-jsonandjson. This property cannot be changed, doing so forces recreation of the resource. - Like
Options string - LIKE statement for table creation. This property cannot be changed, doing so forces recreation of the resource.
- Opensearch
Index string - For an OpenSearch table, the OpenSearch index the table outputs to. This property cannot be changed, doing so forces recreation of the resource.
- Upsert
Kafka FlinkJob Table Upsert Kafka Args - Kafka upsert connector configuration.
- integration
Id String - The id of the service integration that is used with this table. It must have the service integration type
flink. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource. - project String
- Identifies the project this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
- schema
Sql String - The SQL statement to create the table. This property cannot be changed, doing so forces recreation of the resource.
- service
Name String - Specifies the name of the service that this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
- table
Name String - Specifies the name of the table. This property cannot be changed, doing so forces recreation of the resource.
- jdbc
Table String - Name of the jdbc table that is to be connected to this table. Valid if the service integration id refers to a mysql or postgres service. This property cannot be changed, doing so forces recreation of the resource.
- kafka
Connector StringType - When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation The possible values are
kafkaandupsert-kafka. This property cannot be changed, doing so forces recreation of the resource. - kafka
Key List<String>Fields - Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed, doing so forces recreation of the resource.
- kafka
Key StringFormat - Kafka Key Format The possible values are
avro,avro-confluent,debezium-avro-confluent,debezium-jsonandjson. This property cannot be changed, doing so forces recreation of the resource. - kafka
Startup StringMode - Startup mode The possible values are
earliest-offset,latest-offset,group-offsetsandtimestamp. This property cannot be changed, doing so forces recreation of the resource. - kafka
Topic String - Name of the kafka topic that is to be connected to this table. Valid if the service integration id refers to a kafka service. This property cannot be changed, doing so forces recreation of the resource.
- kafka
Value StringFields Include - Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are
[ALL EXCEPT_KEY]. This property cannot be changed, doing so forces recreation of the resource. - kafka
Value StringFormat - Kafka Value Format The possible values are
avro,avro-confluent,debezium-avro-confluent,debezium-jsonandjson. This property cannot be changed, doing so forces recreation of the resource. - like
Options String - LIKE statement for table creation. This property cannot be changed, doing so forces recreation of the resource.
- opensearch
Index String - For an OpenSearch table, the OpenSearch index the table outputs to. This property cannot be changed, doing so forces recreation of the resource.
- upsert
Kafka FlinkJob Table Upsert Kafka - Kafka upsert connector configuration.
- integration
Id string - The id of the service integration that is used with this table. It must have the service integration type
flink. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource. - project string
- Identifies the project this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
- schema
Sql string - The SQL statement to create the table. This property cannot be changed, doing so forces recreation of the resource.
- service
Name string - Specifies the name of the service that this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
- table
Name string - Specifies the name of the table. This property cannot be changed, doing so forces recreation of the resource.
- jdbc
Table string - Name of the jdbc table that is to be connected to this table. Valid if the service integration id refers to a mysql or postgres service. This property cannot be changed, doing so forces recreation of the resource.
- kafka
Connector stringType - When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation The possible values are
kafkaandupsert-kafka. This property cannot be changed, doing so forces recreation of the resource. - kafka
Key string[]Fields - Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed, doing so forces recreation of the resource.
- kafka
Key stringFormat - Kafka Key Format The possible values are
avro,avro-confluent,debezium-avro-confluent,debezium-jsonandjson. This property cannot be changed, doing so forces recreation of the resource. - kafka
Startup stringMode - Startup mode The possible values are
earliest-offset,latest-offset,group-offsetsandtimestamp. This property cannot be changed, doing so forces recreation of the resource. - kafka
Topic string - Name of the kafka topic that is to be connected to this table. Valid if the service integration id refers to a kafka service. This property cannot be changed, doing so forces recreation of the resource.
- kafka
Value stringFields Include - Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are
[ALL EXCEPT_KEY]. This property cannot be changed, doing so forces recreation of the resource. - kafka
Value stringFormat - Kafka Value Format The possible values are
avro,avro-confluent,debezium-avro-confluent,debezium-jsonandjson. This property cannot be changed, doing so forces recreation of the resource. - like
Options string - LIKE statement for table creation. This property cannot be changed, doing so forces recreation of the resource.
- opensearch
Index string - For an OpenSearch table, the OpenSearch index the table outputs to. This property cannot be changed, doing so forces recreation of the resource.
- upsert
Kafka FlinkJob Table Upsert Kafka - Kafka upsert connector configuration.
- integration_
id str - The id of the service integration that is used with this table. It must have the service integration type
flink. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource. - project str
- Identifies the project this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
- schema_
sql str - The SQL statement to create the table. This property cannot be changed, doing so forces recreation of the resource.
- service_
name str - Specifies the name of the service that this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
- table_
name str - Specifies the name of the table. This property cannot be changed, doing so forces recreation of the resource.
- jdbc_
table str - Name of the jdbc table that is to be connected to this table. Valid if the service integration id refers to a mysql or postgres service. This property cannot be changed, doing so forces recreation of the resource.
- kafka_
connector_ strtype - When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation The possible values are
kafkaandupsert-kafka. This property cannot be changed, doing so forces recreation of the resource. - kafka_
key_ Sequence[str]fields - Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed, doing so forces recreation of the resource.
- kafka_
key_ strformat - Kafka Key Format The possible values are
avro,avro-confluent,debezium-avro-confluent,debezium-jsonandjson. This property cannot be changed, doing so forces recreation of the resource. - kafka_
startup_ strmode - Startup mode The possible values are
earliest-offset,latest-offset,group-offsetsandtimestamp. This property cannot be changed, doing so forces recreation of the resource. - kafka_
topic str - Name of the kafka topic that is to be connected to this table. Valid if the service integration id refers to a kafka service. This property cannot be changed, doing so forces recreation of the resource.
- kafka_
value_ strfields_ include - Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are
[ALL EXCEPT_KEY]. This property cannot be changed, doing so forces recreation of the resource. - kafka_
value_ strformat - Kafka Value Format The possible values are
avro,avro-confluent,debezium-avro-confluent,debezium-jsonandjson. This property cannot be changed, doing so forces recreation of the resource. - like_
options str - LIKE statement for table creation. This property cannot be changed, doing so forces recreation of the resource.
- opensearch_
index str - For an OpenSearch table, the OpenSearch index the table outputs to. This property cannot be changed, doing so forces recreation of the resource.
- upsert_
kafka FlinkJob Table Upsert Kafka Args - Kafka upsert connector configuration.
- integration
Id String - The id of the service integration that is used with this table. It must have the service integration type
flink. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource. - project String
- Identifies the project this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
- schema
Sql String - The SQL statement to create the table. This property cannot be changed, doing so forces recreation of the resource.
- service
Name String - Specifies the name of the service that this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
- table
Name String - Specifies the name of the table. This property cannot be changed, doing so forces recreation of the resource.
- jdbc
Table String - Name of the jdbc table that is to be connected to this table. Valid if the service integration id refers to a mysql or postgres service. This property cannot be changed, doing so forces recreation of the resource.
- kafka
Connector StringType - When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation The possible values are
kafkaandupsert-kafka. This property cannot be changed, doing so forces recreation of the resource. - kafka
Key List<String>Fields - Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed, doing so forces recreation of the resource.
- kafka
Key StringFormat - Kafka Key Format The possible values are
avro,avro-confluent,debezium-avro-confluent,debezium-jsonandjson. This property cannot be changed, doing so forces recreation of the resource. - kafka
Startup StringMode - Startup mode The possible values are
earliest-offset,latest-offset,group-offsetsandtimestamp. This property cannot be changed, doing so forces recreation of the resource. - kafka
Topic String - Name of the kafka topic that is to be connected to this table. Valid if the service integration id refers to a kafka service. This property cannot be changed, doing so forces recreation of the resource.
- kafka
Value StringFields Include - Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are
[ALL EXCEPT_KEY]. This property cannot be changed, doing so forces recreation of the resource. - kafka
Value StringFormat - Kafka Value Format The possible values are
avro,avro-confluent,debezium-avro-confluent,debezium-jsonandjson. This property cannot be changed, doing so forces recreation of the resource. - like
Options String - LIKE statement for table creation. This property cannot be changed, doing so forces recreation of the resource.
- opensearch
Index String - For an OpenSearch table, the OpenSearch index the table outputs to. This property cannot be changed, doing so forces recreation of the resource.
- upsert
Kafka Property Map - Kafka upsert connector configuration.
Outputs
All input properties are implicitly available as output properties. Additionally, the FlinkJobTable resource produces the following output properties:
Look up Existing FlinkJobTable Resource
Get an existing FlinkJobTable resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: FlinkJobTableState, opts?: CustomResourceOptions): FlinkJobTable@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
integration_id: Optional[str] = None,
jdbc_table: Optional[str] = None,
kafka_connector_type: Optional[str] = None,
kafka_key_fields: Optional[Sequence[str]] = None,
kafka_key_format: Optional[str] = None,
kafka_startup_mode: Optional[str] = None,
kafka_topic: Optional[str] = None,
kafka_value_fields_include: Optional[str] = None,
kafka_value_format: Optional[str] = None,
like_options: Optional[str] = None,
opensearch_index: Optional[str] = None,
project: Optional[str] = None,
schema_sql: Optional[str] = None,
service_name: Optional[str] = None,
table_id: Optional[str] = None,
table_name: Optional[str] = None,
upsert_kafka: Optional[FlinkJobTableUpsertKafkaArgs] = None) -> FlinkJobTablefunc GetFlinkJobTable(ctx *Context, name string, id IDInput, state *FlinkJobTableState, opts ...ResourceOption) (*FlinkJobTable, error)public static FlinkJobTable Get(string name, Input<string> id, FlinkJobTableState? state, CustomResourceOptions? opts = null)public static FlinkJobTable get(String name, Output<String> id, FlinkJobTableState state, CustomResourceOptions options)resources: _: type: aiven:FlinkJobTable get: id: ${id}- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- Integration
Id string - The id of the service integration that is used with this table. It must have the service integration type
flink. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource. - Jdbc
Table string - Name of the jdbc table that is to be connected to this table. Valid if the service integration id refers to a mysql or postgres service. This property cannot be changed, doing so forces recreation of the resource.
- Kafka
Connector stringType - When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation. The possible values are
kafka and upsert-kafka. This property cannot be changed, doing so forces recreation of the resource. - Kafka
Key List<string>Fields - Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed, doing so forces recreation of the resource.
- Kafka
Key stringFormat - Kafka Key Format. The possible values are
avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed, doing so forces recreation of the resource. - Kafka
Startup stringMode - Startup mode. The possible values are
earliest-offset, latest-offset, group-offsets and timestamp. This property cannot be changed, doing so forces recreation of the resource. - Kafka
Topic string - Name of the kafka topic that is to be connected to this table. Valid if the service integration id refers to a kafka service. This property cannot be changed, doing so forces recreation of the resource.
- Kafka
Value stringFields Include - Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are
ALL and EXCEPT_KEY. This property cannot be changed, doing so forces recreation of the resource. - Kafka
Value stringFormat - Kafka Value Format. The possible values are
avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed, doing so forces recreation of the resource. - Like
Options string - LIKE statement for table creation. This property cannot be changed, doing so forces recreation of the resource.
- Opensearch
Index string - For an OpenSearch table, the OpenSearch index the table outputs to. This property cannot be changed, doing so forces recreation of the resource.
- Project string
- Identifies the project this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
- Schema
Sql string - The SQL statement to create the table. This property cannot be changed, doing so forces recreation of the resource.
- Service
Name string - Specifies the name of the service that this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
- Table
Id string - The Table ID of the flink table in the flink service.
- Table
Name string - Specifies the name of the table. This property cannot be changed, doing so forces recreation of the resource.
- Upsert
Kafka FlinkJob Table Upsert Kafka - Kafka upsert connector configuration.
- Integration
Id string - The id of the service integration that is used with this table. It must have the service integration type
flink. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource. - Jdbc
Table string - Name of the jdbc table that is to be connected to this table. Valid if the service integration id refers to a mysql or postgres service. This property cannot be changed, doing so forces recreation of the resource.
- Kafka
Connector stringType - When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation. The possible values are
kafka and upsert-kafka. This property cannot be changed, doing so forces recreation of the resource. - Kafka
Key []stringFields - Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed, doing so forces recreation of the resource.
- Kafka
Key stringFormat - Kafka Key Format The possible values are
avro,avro-confluent,debezium-avro-confluent,debezium-jsonandjson. This property cannot be changed, doing so forces recreation of the resource. - Kafka
Startup stringMode - Startup mode The possible values are
earliest-offset,latest-offset,group-offsetsandtimestamp. This property cannot be changed, doing so forces recreation of the resource. - Kafka
Topic string - Name of the kafka topic that is to be connected to this table. Valid if the service integration id refers to a kafka service. This property cannot be changed, doing so forces recreation of the resource.
- Kafka
Value stringFields Include - Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are
[ALL EXCEPT_KEY]. This property cannot be changed, doing so forces recreation of the resource. - Kafka
Value stringFormat - Kafka Value Format The possible values are
avro,avro-confluent,debezium-avro-confluent,debezium-jsonandjson. This property cannot be changed, doing so forces recreation of the resource. - Like
Options string - LIKE statement for table creation. This property cannot be changed, doing so forces recreation of the resource.
- Opensearch
Index string - For an OpenSearch table, the OpenSearch index the table outputs to. This property cannot be changed, doing so forces recreation of the resource.
- Project string
- Identifies the project this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
- Schema
Sql string - The SQL statement to create the table. This property cannot be changed, doing so forces recreation of the resource.
- Service
Name string - Specifies the name of the service that this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
- Table
Id string - The Table ID of the flink table in the flink service.
- Table
Name string - Specifies the name of the table. This property cannot be changed, doing so forces recreation of the resource.
- Upsert
Kafka FlinkJob Table Upsert Kafka Args - Kafka upsert connector configuration.
- integration
Id String - The id of the service integration that is used with this table. It must have the service integration type
flink. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource. - jdbc
Table String - Name of the jdbc table that is to be connected to this table. Valid if the service integration id refers to a mysql or postgres service. This property cannot be changed, doing so forces recreation of the resource.
- kafka
Connector StringType - When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation. The possible values are
kafka and upsert-kafka. This property cannot be changed, doing so forces recreation of the resource. - kafka
Key List<String>Fields - Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed, doing so forces recreation of the resource.
- kafka
Key StringFormat - Kafka Key Format The possible values are
avro,avro-confluent,debezium-avro-confluent,debezium-jsonandjson. This property cannot be changed, doing so forces recreation of the resource. - kafka
Startup StringMode - Startup mode The possible values are
earliest-offset,latest-offset,group-offsetsandtimestamp. This property cannot be changed, doing so forces recreation of the resource. - kafka
Topic String - Name of the kafka topic that is to be connected to this table. Valid if the service integration id refers to a kafka service. This property cannot be changed, doing so forces recreation of the resource.
- kafka
Value StringFields Include - Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are
[ALL EXCEPT_KEY]. This property cannot be changed, doing so forces recreation of the resource. - kafka
Value StringFormat - Kafka Value Format The possible values are
avro,avro-confluent,debezium-avro-confluent,debezium-jsonandjson. This property cannot be changed, doing so forces recreation of the resource. - like
Options String - LIKE statement for table creation. This property cannot be changed, doing so forces recreation of the resource.
- opensearch
Index String - For an OpenSearch table, the OpenSearch index the table outputs to. This property cannot be changed, doing so forces recreation of the resource.
- project String
- Identifies the project this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
- schema
Sql String - The SQL statement to create the table. This property cannot be changed, doing so forces recreation of the resource.
- service
Name String - Specifies the name of the service that this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
- table
Id String - The Table ID of the flink table in the flink service.
- table
Name String - Specifies the name of the table. This property cannot be changed, doing so forces recreation of the resource.
- upsert
Kafka FlinkJob Table Upsert Kafka - Kafka upsert connector configuration.
- integration
Id string - The id of the service integration that is used with this table. It must have the service integration type
flink. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource. - jdbc
Table string - Name of the jdbc table that is to be connected to this table. Valid if the service integration id refers to a mysql or postgres service. This property cannot be changed, doing so forces recreation of the resource.
- kafka
Connector stringType - When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation. The possible values are
kafka and upsert-kafka. This property cannot be changed, doing so forces recreation of the resource. - kafka
Key string[]Fields - Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed, doing so forces recreation of the resource.
- kafka
Key stringFormat - Kafka Key Format The possible values are
avro,avro-confluent,debezium-avro-confluent,debezium-jsonandjson. This property cannot be changed, doing so forces recreation of the resource. - kafka
Startup stringMode - Startup mode The possible values are
earliest-offset,latest-offset,group-offsetsandtimestamp. This property cannot be changed, doing so forces recreation of the resource. - kafka
Topic string - Name of the kafka topic that is to be connected to this table. Valid if the service integration id refers to a kafka service. This property cannot be changed, doing so forces recreation of the resource.
- kafka
Value stringFields Include - Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are
[ALL EXCEPT_KEY]. This property cannot be changed, doing so forces recreation of the resource. - kafka
Value stringFormat - Kafka Value Format The possible values are
avro,avro-confluent,debezium-avro-confluent,debezium-jsonandjson. This property cannot be changed, doing so forces recreation of the resource. - like
Options string - LIKE statement for table creation. This property cannot be changed, doing so forces recreation of the resource.
- opensearch
Index string - For an OpenSearch table, the OpenSearch index the table outputs to. This property cannot be changed, doing so forces recreation of the resource.
- project string
- Identifies the project this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
- schema
Sql string - The SQL statement to create the table. This property cannot be changed, doing so forces recreation of the resource.
- service
Name string - Specifies the name of the service that this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
- table
Id string - The Table ID of the flink table in the flink service.
- table
Name string - Specifies the name of the table. This property cannot be changed, doing so forces recreation of the resource.
- upsert
Kafka FlinkJob Table Upsert Kafka - Kafka upsert connector configuration.
- integration_
id str - The id of the service integration that is used with this table. It must have the service integration type
flink. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource. - jdbc_
table str - Name of the jdbc table that is to be connected to this table. Valid if the service integration id refers to a mysql or postgres service. This property cannot be changed, doing so forces recreation of the resource.
- kafka_
connector_ strtype - When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation. The possible values are
kafka and upsert-kafka. This property cannot be changed, doing so forces recreation of the resource. - kafka_
key_ Sequence[str]fields - Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed, doing so forces recreation of the resource.
- kafka_
key_ strformat - Kafka Key Format The possible values are
avro,avro-confluent,debezium-avro-confluent,debezium-jsonandjson. This property cannot be changed, doing so forces recreation of the resource. - kafka_
startup_ strmode - Startup mode The possible values are
earliest-offset,latest-offset,group-offsetsandtimestamp. This property cannot be changed, doing so forces recreation of the resource. - kafka_
topic str - Name of the kafka topic that is to be connected to this table. Valid if the service integration id refers to a kafka service. This property cannot be changed, doing so forces recreation of the resource.
- kafka_
value_ strfields_ include - Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are
[ALL EXCEPT_KEY]. This property cannot be changed, doing so forces recreation of the resource. - kafka_
value_ strformat - Kafka Value Format The possible values are
avro,avro-confluent,debezium-avro-confluent,debezium-jsonandjson. This property cannot be changed, doing so forces recreation of the resource. - like_
options str - LIKE statement for table creation. This property cannot be changed, doing so forces recreation of the resource.
- opensearch_
index str - For an OpenSearch table, the OpenSearch index the table outputs to. This property cannot be changed, doing so forces recreation of the resource.
- project str
- Identifies the project this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
- schema_
sql str - The SQL statement to create the table. This property cannot be changed, doing so forces recreation of the resource.
- service_
name str - Specifies the name of the service that this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
- table_
id str - The Table ID of the flink table in the flink service.
- table_
name str - Specifies the name of the table. This property cannot be changed, doing so forces recreation of the resource.
- upsert_
kafka FlinkJob Table Upsert Kafka Args - Kafka upsert connector configuration.
- integration
Id String - The id of the service integration that is used with this table. It must have the service integration type
flink. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource. - jdbc
Table String - Name of the jdbc table that is to be connected to this table. Valid if the service integration id refers to a mysql or postgres service. This property cannot be changed, doing so forces recreation of the resource.
- kafka
Connector StringType - When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation. The possible values are
kafka and upsert-kafka. This property cannot be changed, doing so forces recreation of the resource. - kafka
Key List<String>Fields - Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed, doing so forces recreation of the resource.
- kafka
Key StringFormat - Kafka Key Format The possible values are
avro,avro-confluent,debezium-avro-confluent,debezium-jsonandjson. This property cannot be changed, doing so forces recreation of the resource. - kafka
Startup StringMode - Startup mode The possible values are
earliest-offset,latest-offset,group-offsetsandtimestamp. This property cannot be changed, doing so forces recreation of the resource. - kafka
Topic String - Name of the kafka topic that is to be connected to this table. Valid if the service integration id refers to a kafka service. This property cannot be changed, doing so forces recreation of the resource.
- kafka
Value StringFields Include - Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are
[ALL EXCEPT_KEY]. This property cannot be changed, doing so forces recreation of the resource. - kafka
Value StringFormat - Kafka Value Format The possible values are
avro,avro-confluent,debezium-avro-confluent,debezium-jsonandjson. This property cannot be changed, doing so forces recreation of the resource. - like
Options String - LIKE statement for table creation. This property cannot be changed, doing so forces recreation of the resource.
- opensearch
Index String - For an OpenSearch table, the OpenSearch index the table outputs to. This property cannot be changed, doing so forces recreation of the resource.
- project String
- Identifies the project this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
- schema
Sql String - The SQL statement to create the table. This property cannot be changed, doing so forces recreation of the resource.
- service
Name String - Specifies the name of the service that this resource belongs to. To set up proper dependencies please refer to this variable as a reference. This property cannot be changed, doing so forces recreation of the resource.
- table
Id String - The Table ID of the flink table in the flink service.
- table
Name String - Specifies the name of the table. This property cannot be changed, doing so forces recreation of the resource.
- upsert
Kafka Property Map - Kafka upsert connector configuration.
Supporting Types
FlinkJobTableUpsertKafka, FlinkJobTableUpsertKafkaArgs
- Key
Fields List<string> - Defines the columns from the SQL schema of the data table that are considered keys in the Kafka messages. This property cannot be changed, doing so forces recreation of the resource.
- Key
Format string - Sets the format that is used to convert the key part of Kafka messages. The possible values are
avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed, doing so forces recreation of the resource. - Scan
Startup stringMode - Controls the startup method for the Kafka consumer that Aiven for Apache Flink is using. The possible values are
earliest-offset, latest-offset, group-offsets and timestamp. This property cannot be changed, doing so forces recreation of the resource. - Topic string
- Topic name This property cannot be changed, doing so forces recreation of the resource.
- Value
Fields stringInclude - Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are
ALL and EXCEPT_KEY. This property cannot be changed, doing so forces recreation of the resource. - Value
Format string - Sets the format that is used to convert the value part of Kafka messages. The possible values are
avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed, doing so forces recreation of the resource.
- Key
Fields []string - Defines the columns from the SQL schema of the data table that are considered keys in the Kafka messages. This property cannot be changed, doing so forces recreation of the resource.
- Key
Format string - Sets the format that is used to convert the key part of Kafka messages. The possible values are
avro,avro-confluent,debezium-avro-confluent,debezium-jsonandjson. This property cannot be changed, doing so forces recreation of the resource. - Scan
Startup stringMode - Controls the startup method for the Kafka consumer that Aiven for Apache Flink is using. The possible values are
earliest-offset,latest-offset,group-offsetsandtimestamp. This property cannot be changed, doing so forces recreation of the resource. - Topic string
- Topic name This property cannot be changed, doing so forces recreation of the resource.
- Value
Fields stringInclude - Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are
[ALL EXCEPT_KEY]. This property cannot be changed, doing so forces recreation of the resource. - Value
Format string - Sets the format that is used to convert the value part of Kafka messages. The possible values are
avro,avro-confluent,debezium-avro-confluent,debezium-jsonandjson. This property cannot be changed, doing so forces recreation of the resource.
- key
Fields List<String> - Defines the columns from the SQL schema of the data table that are considered keys in the Kafka messages. This property cannot be changed, doing so forces recreation of the resource.
- key
Format String - Sets the format that is used to convert the key part of Kafka messages. The possible values are
avro,avro-confluent,debezium-avro-confluent,debezium-jsonandjson. This property cannot be changed, doing so forces recreation of the resource. - scan
Startup StringMode - Controls the startup method for the Kafka consumer that Aiven for Apache Flink is using. The possible values are
earliest-offset,latest-offset,group-offsetsandtimestamp. This property cannot be changed, doing so forces recreation of the resource. - topic String
- Topic name This property cannot be changed, doing so forces recreation of the resource.
- value
Fields StringInclude - Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are
[ALL EXCEPT_KEY]. This property cannot be changed, doing so forces recreation of the resource. - value
Format String - Sets the format that is used to convert the value part of Kafka messages. The possible values are
avro,avro-confluent,debezium-avro-confluent,debezium-jsonandjson. This property cannot be changed, doing so forces recreation of the resource.
- key
Fields string[] - Defines the columns from the SQL schema of the data table that are considered keys in the Kafka messages. This property cannot be changed, doing so forces recreation of the resource.
- key
Format string - Sets the format that is used to convert the key part of Kafka messages. The possible values are
avro,avro-confluent,debezium-avro-confluent,debezium-jsonandjson. This property cannot be changed, doing so forces recreation of the resource. - scan
Startup stringMode - Controls the startup method for the Kafka consumer that Aiven for Apache Flink is using. The possible values are
earliest-offset,latest-offset,group-offsetsandtimestamp. This property cannot be changed, doing so forces recreation of the resource. - topic string
- Topic name This property cannot be changed, doing so forces recreation of the resource.
- value
Fields stringInclude - Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are
[ALL EXCEPT_KEY]. This property cannot be changed, doing so forces recreation of the resource. - value
Format string - Sets the format that is used to convert the value part of Kafka messages. The possible values are
avro,avro-confluent,debezium-avro-confluent,debezium-jsonandjson. This property cannot be changed, doing so forces recreation of the resource.
- key_fields Sequence[str] - Defines the columns from the SQL schema of the data table that are considered keys in the Kafka messages. This property cannot be changed, doing so forces recreation of the resource.
- key_format str - Sets the format that is used to convert the key part of Kafka messages. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed, doing so forces recreation of the resource.
- scan_startup_mode str - Controls the startup method for the Kafka consumer that Aiven for Apache Flink is using. The possible values are earliest-offset, latest-offset, group-offsets and timestamp. This property cannot be changed, doing so forces recreation of the resource.
- topic str - Topic name. This property cannot be changed, doing so forces recreation of the resource.
- value_fields_include str - Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are [ALL, EXCEPT_KEY]. This property cannot be changed, doing so forces recreation of the resource.
- value_format str - Sets the format that is used to convert the value part of Kafka messages. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed, doing so forces recreation of the resource.
- keyFields List<String> - Defines the columns from the SQL schema of the data table that are considered keys in the Kafka messages. This property cannot be changed, doing so forces recreation of the resource.
- keyFormat String - Sets the format that is used to convert the key part of Kafka messages. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed, doing so forces recreation of the resource.
- scanStartupMode String - Controls the startup method for the Kafka consumer that Aiven for Apache Flink is using. The possible values are earliest-offset, latest-offset, group-offsets and timestamp. This property cannot be changed, doing so forces recreation of the resource.
- topic String - Topic name. This property cannot be changed, doing so forces recreation of the resource.
- valueFieldsInclude String - Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value. This is the default for upsert Kafka connectors. The possible values are [ALL, EXCEPT_KEY]. This property cannot be changed, doing so forces recreation of the resource.
- valueFormat String - Sets the format that is used to convert the value part of Kafka messages. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed, doing so forces recreation of the resource.
Import
$ pulumi import aiven:index/flinkJobTable:FlinkJobTable table project/service_name/table_id
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- Aiven pulumi/pulumi-aiven
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the aiven Terraform Provider.
published on Monday, Mar 9, 2026 by Pulumi
