Aiven
FlinkJobTable
The FlinkJobTable resource allows the creation and management of Aiven Flink tables.
Example Usage
using Pulumi;
using Aiven = Pulumi.Aiven;

class MyStack : Stack
{
    public MyStack()
    {
        var table = new Aiven.FlinkJobTable("table", new Aiven.FlinkJobTableArgs
        {
            Project = data.Aiven_project.Pr1.Project,
            ServiceName = aiven_flink.Flink.Service_name,
            TableName = "<TABLE_NAME>",
            IntegrationId = aiven_service_integration.Flink_kafka.Service_id,
            JdbcTable = "<JDBC_TABLE_NAME>",
            KafkaTopic = aiven_kafka_topic.Table_topic.Topic_name,
            SchemaSql = @"`cpu` INT,
`node` INT,
`occurred_at` TIMESTAMP(3) METADATA FROM 'timestamp',
WATERMARK FOR `occurred_at` AS `occurred_at` - INTERVAL '5' SECOND
",
        });
    }
}
package main

import (
	"github.com/pulumi/pulumi-aiven/sdk/v5/go/aiven"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := aiven.NewFlinkJobTable(ctx, "table", &aiven.FlinkJobTableArgs{
			Project:       pulumi.Any(data.Aiven_project.Pr1.Project),
			ServiceName:   pulumi.Any(aiven_flink.Flink.Service_name),
			TableName:     pulumi.String("<TABLE_NAME>"),
			IntegrationId: pulumi.Any(aiven_service_integration.Flink_kafka.Service_id),
			JdbcTable:     pulumi.String("<JDBC_TABLE_NAME>"),
			KafkaTopic:    pulumi.Any(aiven_kafka_topic.Table_topic.Topic_name),
			SchemaSql:     pulumi.String("`cpu` INT,\n`node` INT,\n`occurred_at` TIMESTAMP(3) METADATA FROM 'timestamp',\nWATERMARK FOR `occurred_at` AS `occurred_at` - INTERVAL '5' SECOND\n"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.aiven.FlinkJobTable;
import com.pulumi.aiven.FlinkJobTableArgs;
import java.util.*;
import java.io.*;
import java.nio.*;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var table = new FlinkJobTable("table", FlinkJobTableArgs.builder()
            .project(data.aiven_project().pr1().project())
            .serviceName(aiven_flink.flink().service_name())
            .tableName("<TABLE_NAME>")
            .integrationId(aiven_service_integration.flink_kafka().service_id())
            .jdbcTable("<JDBC_TABLE_NAME>")
            .kafkaTopic(aiven_kafka_topic.table_topic().topic_name())
            .schemaSql("""
                `cpu` INT,
                `node` INT,
                `occurred_at` TIMESTAMP(3) METADATA FROM 'timestamp',
                WATERMARK FOR `occurred_at` AS `occurred_at` - INTERVAL '5' SECOND
                """)
            .build());
    }
}
import pulumi
import pulumi_aiven as aiven

table = aiven.FlinkJobTable("table",
    project=data["aiven_project"]["pr1"]["project"],
    service_name=aiven_flink["flink"]["service_name"],
    table_name="<TABLE_NAME>",
    integration_id=aiven_service_integration["flink_kafka"]["service_id"],
    jdbc_table="<JDBC_TABLE_NAME>",
    kafka_topic=aiven_kafka_topic["table_topic"]["topic_name"],
    schema_sql="""`cpu` INT,
`node` INT,
`occurred_at` TIMESTAMP(3) METADATA FROM 'timestamp',
WATERMARK FOR `occurred_at` AS `occurred_at` - INTERVAL '5' SECOND
""")
import * as pulumi from "@pulumi/pulumi";
import * as aiven from "@pulumi/aiven";

const table = new aiven.FlinkJobTable("table", {
    project: data.aiven_project.pr1.project,
    serviceName: aiven_flink.flink.service_name,
    tableName: "<TABLE_NAME>",
    integrationId: aiven_service_integration.flink_kafka.service_id,
    jdbcTable: "<JDBC_TABLE_NAME>",
    kafkaTopic: aiven_kafka_topic.table_topic.topic_name,
    schemaSql: `\`cpu\` INT,
\`node\` INT,
\`occurred_at\` TIMESTAMP(3) METADATA FROM 'timestamp',
WATERMARK FOR \`occurred_at\` AS \`occurred_at\` - INTERVAL '5' SECOND
`,
});
resources:
  table:
    type: aiven:FlinkJobTable
    properties:
      project: ${data.aiven_project.pr1.project}
      serviceName: ${aiven_flink.flink.service_name}
      tableName: <TABLE_NAME>
      integrationId: ${aiven_service_integration.flink_kafka.service_id}
      jdbcTable: <JDBC_TABLE_NAME>
      kafkaTopic: ${aiven_kafka_topic.table_topic.topic_name}
      schemaSql: |
        `cpu` INT,
        `node` INT,
        `occurred_at` TIMESTAMP(3) METADATA FROM 'timestamp',
        WATERMARK FOR `occurred_at` AS `occurred_at` - INTERVAL '5' SECOND
Create a FlinkJobTable Resource
new FlinkJobTable(name: string, args: FlinkJobTableArgs, opts?: CustomResourceOptions);
@overload
def FlinkJobTable(resource_name: str,
opts: Optional[ResourceOptions] = None,
integration_id: Optional[str] = None,
jdbc_table: Optional[str] = None,
kafka_connector_type: Optional[str] = None,
kafka_key_fields: Optional[Sequence[str]] = None,
kafka_key_format: Optional[str] = None,
kafka_startup_mode: Optional[str] = None,
kafka_topic: Optional[str] = None,
kafka_value_fields_include: Optional[str] = None,
kafka_value_format: Optional[str] = None,
like_options: Optional[str] = None,
project: Optional[str] = None,
schema_sql: Optional[str] = None,
service_name: Optional[str] = None,
table_name: Optional[str] = None,
upsert_kafka: Optional[FlinkJobTableUpsertKafkaArgs] = None)
@overload
def FlinkJobTable(resource_name: str,
args: FlinkJobTableArgs,
opts: Optional[ResourceOptions] = None)
func NewFlinkJobTable(ctx *Context, name string, args FlinkJobTableArgs, opts ...ResourceOption) (*FlinkJobTable, error)
public FlinkJobTable(string name, FlinkJobTableArgs args, CustomResourceOptions? opts = null)
public FlinkJobTable(String name, FlinkJobTableArgs args)
public FlinkJobTable(String name, FlinkJobTableArgs args, CustomResourceOptions options)
type: aiven:FlinkJobTable
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
All SDKs take the same three constructor arguments, shown here once. Python names the first parameter resource_name and takes ResourceOptions; Go additionally takes a ctx Context object for the current deployment and variadic ResourceOption values.
- name string
- The unique name of the resource.
- args FlinkJobTableArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
FlinkJobTable Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
The FlinkJobTable resource accepts the following input properties:
Property names are shown in the provider's canonical casing; each SDK exposes the same properties with language-appropriate names and types (for example schema_sql and Sequence[str] in Python, schemaSql and string[] in TypeScript, List<String> in Java, a plain property map in YAML).
- IntegrationId string
  The ID of the service integration that is used with this table. It must have the service integration type flink. To set up proper dependencies, please refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.
- Project string
  Identifies the project this resource belongs to. To set up proper dependencies, please refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.
- SchemaSql string
  The SQL statement to create the table. This property cannot be changed; doing so forces recreation of the resource.
- ServiceName string
  Specifies the name of the service that this resource belongs to. To set up proper dependencies, please refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.
- TableName string
  Specifies the name of the table. This property cannot be changed; doing so forces recreation of the resource.
- JdbcTable string
  Name of the JDBC table to connect to this table. Valid if the service integration ID refers to a MySQL or PostgreSQL service. This property cannot be changed; doing so forces recreation of the resource.
- KafkaConnectorType string
  When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation. The possible values are kafka and upsert-kafka. This property cannot be changed; doing so forces recreation of the resource.
- KafkaKeyFields List<string>
  Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed; doing so forces recreation of the resource.
- KafkaKeyFormat string
  Kafka key format. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed; doing so forces recreation of the resource.
- KafkaStartupMode string
  Startup mode. The possible values are earliest-offset, latest-offset, group-offsets and timestamp. This property cannot be changed; doing so forces recreation of the resource.
- KafkaTopic string
  Name of the Kafka topic to connect to this table. Valid if the service integration ID refers to a Kafka service. This property cannot be changed; doing so forces recreation of the resource.
- KafkaValueFieldsInclude string
  Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value; this is the default for upsert Kafka connectors. The possible values are ALL and EXCEPT_KEY. This property cannot be changed; doing so forces recreation of the resource.
- KafkaValueFormat string
  Kafka value format. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed; doing so forces recreation of the resource.
- LikeOptions string
  LIKE statement for table creation. This property cannot be changed; doing so forces recreation of the resource.
- UpsertKafka FlinkJobTableUpsertKafka
  Kafka upsert connector configuration. See Supporting Types below.
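To illustrate the Kafka-specific inputs above, here is a minimal TypeScript sketch of a table backed by the plain kafka connector. Every project, service, integration, and topic value is a hypothetical placeholder, not output from the Example Usage program.

import * as aiven from "@pulumi/aiven";

// A source table reading JSON records from a Kafka topic.
// All IDs and names below are hypothetical placeholders.
const sourceTable = new aiven.FlinkJobTable("source-table", {
    project: "my-project",
    serviceName: "my-flink",
    tableName: "cpu_measurements",
    integrationId: "00000000-0000-0000-0000-000000000000", // a service integration of type flink
    kafkaTopic: "cpu-topic",
    kafkaConnectorType: "kafka",         // the other allowed value is upsert-kafka
    kafkaStartupMode: "earliest-offset", // read the topic from the beginning
    kafkaValueFormat: "json",
    schemaSql: "`cpu` INT, `node` INT",
});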
Outputs
All input properties are implicitly available as output properties. Additionally, the FlinkJobTable resource produces the following output properties:
- Id string
  The provider-assigned unique ID for this managed resource.
- TableId string
  The Table ID of the Flink table in the Flink service.
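For example, continuing the TypeScript program from the Example Usage section, the generated table ID can be exported as a stack output (a minimal sketch; table is the resource declared above):

// Export the server-generated table ID for use by other stacks or tooling.
export const flinkTableId = table.tableId;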
Look up an Existing FlinkJobTable Resource
Get an existing FlinkJobTable resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: FlinkJobTableState, opts?: CustomResourceOptions): FlinkJobTable
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
integration_id: Optional[str] = None,
jdbc_table: Optional[str] = None,
kafka_connector_type: Optional[str] = None,
kafka_key_fields: Optional[Sequence[str]] = None,
kafka_key_format: Optional[str] = None,
kafka_startup_mode: Optional[str] = None,
kafka_topic: Optional[str] = None,
kafka_value_fields_include: Optional[str] = None,
kafka_value_format: Optional[str] = None,
like_options: Optional[str] = None,
project: Optional[str] = None,
schema_sql: Optional[str] = None,
service_name: Optional[str] = None,
table_id: Optional[str] = None,
table_name: Optional[str] = None,
upsert_kafka: Optional[FlinkJobTableUpsertKafkaArgs] = None) -> FlinkJobTable
func GetFlinkJobTable(ctx *Context, name string, id IDInput, state *FlinkJobTableState, opts ...ResourceOption) (*FlinkJobTable, error)
public static FlinkJobTable Get(string name, Input<string> id, FlinkJobTableState? state, CustomResourceOptions? opts = null)
public static FlinkJobTable get(String name, Output<String> id, FlinkJobTableState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
All SDKs take the same lookup arguments, shown here once; per-SDK parameter names and types follow the signatures above.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to look up.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
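A minimal TypeScript sketch of such a lookup, using a hypothetical resource ID in the project/service_name/table_id form shown in the Import section below:

import * as aiven from "@pulumi/aiven";

// Adopt an existing table into the program without creating it.
// The ID components are hypothetical placeholders.
const existingTable = aiven.FlinkJobTable.get(
    "existing-table",
    "my-project/my-flink/00000000-0000-0000-0000-000000000000");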
The following state properties are supported. As with the inputs, property names are shown in the provider's canonical casing; each SDK uses language-appropriate names and types.
- IntegrationId string
  The ID of the service integration that is used with this table. It must have the service integration type flink. To set up proper dependencies, please refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.
- JdbcTable string
  Name of the JDBC table to connect to this table. Valid if the service integration ID refers to a MySQL or PostgreSQL service. This property cannot be changed; doing so forces recreation of the resource.
- KafkaConnectorType string
  When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation. The possible values are kafka and upsert-kafka. This property cannot be changed; doing so forces recreation of the resource.
- KafkaKeyFields List<string>
  Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed; doing so forces recreation of the resource.
- KafkaKeyFormat string
  Kafka key format. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed; doing so forces recreation of the resource.
- KafkaStartupMode string
  Startup mode. The possible values are earliest-offset, latest-offset, group-offsets and timestamp. This property cannot be changed; doing so forces recreation of the resource.
- KafkaTopic string
  Name of the Kafka topic to connect to this table. Valid if the service integration ID refers to a Kafka service. This property cannot be changed; doing so forces recreation of the resource.
- KafkaValueFieldsInclude string
  Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value; this is the default for upsert Kafka connectors. The possible values are ALL and EXCEPT_KEY. This property cannot be changed; doing so forces recreation of the resource.
- KafkaValueFormat string
  Kafka value format. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json and json. This property cannot be changed; doing so forces recreation of the resource.
- LikeOptions string
  LIKE statement for table creation. This property cannot be changed; doing so forces recreation of the resource.
- Project string
  Identifies the project this resource belongs to. To set up proper dependencies, please refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.
- SchemaSql string
  The SQL statement to create the table. This property cannot be changed; doing so forces recreation of the resource.
- ServiceName string
  Specifies the name of the service that this resource belongs to. To set up proper dependencies, please refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.
- TableId string
  The Table ID of the Flink table in the Flink service.
- TableName string
  Specifies the name of the table. This property cannot be changed; doing so forces recreation of the resource.
- UpsertKafka FlinkJobTableUpsertKafka
  Kafka upsert connector configuration. See Supporting Types below.
Supporting Types
FlinkJobTableUpsertKafka
Field names are shown once in canonical casing; per-SDK casing and collection types follow the same conventions as above.
- KeyFields List<string>
- KeyFormat string
- ScanStartupMode string
- Topic string
- ValueFieldsInclude string
- ValueFormat string
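As a sketch of how this supporting type plugs into the resource, the hypothetical TypeScript program below configures an upsert-kafka table. The upsertKafka block mirrors the fields listed above; the primary key in schemaSql reflects the upsert connector's need for key columns, an assumption drawn from the connector description in the Inputs section.

import * as aiven from "@pulumi/aiven";

// An upsert table: records sharing a key update or delete the existing row.
// Every ID and name below is a hypothetical placeholder.
const upsertTable = new aiven.FlinkJobTable("upsert-table", {
    project: "my-project",
    serviceName: "my-flink",
    tableName: "node_state",
    integrationId: "00000000-0000-0000-0000-000000000000",
    schemaSql: "`node` INT, `cpu` INT, PRIMARY KEY (`node`) NOT ENFORCED",
    upsertKafka: {
        topic: "node-state-topic",
        keyFields: ["node"],
        keyFormat: "json",
        valueFormat: "json",
        valueFieldsInclude: "ALL",
        scanStartupMode: "earliest-offset",
    },
});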
Import
$ pulumi import aiven:index/flinkJobTable:FlinkJobTable table project/service_name/table_id
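For example, adopting the table from the Example Usage section under hypothetical project, service, and table IDs:

$ pulumi import aiven:index/flinkJobTable:FlinkJobTable table my-project/my-flink/00000000-0000-0000-0000-000000000000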
Package Details
- Repository
- https://github.com/pulumi/pulumi-aiven
- License
- Apache-2.0
- Notes
This Pulumi package is based on the aiven Terraform Provider.