Aiven

v5.2.0 published on Wednesday, Jul 20, 2022 by Pulumi

FlinkJobTable

The FlinkJobTable resource allows the creation and management of Flink tables in Aiven.

Example Usage

C#

using Pulumi;
using Aiven = Pulumi.Aiven;

class MyStack : Stack
{
    public MyStack()
    {
        var table = new Aiven.FlinkJobTable("table", new Aiven.FlinkJobTableArgs
        {
            Project = data.Aiven_project.Pr1.Project,
            ServiceName = aiven_flink.Flink.Service_name,
            TableName = "<TABLE_NAME>",
            IntegrationId = aiven_service_integration.Flink_kafka.Service_id,
            JdbcTable = "<JDBC_TABLE_NAME>",
            KafkaTopic = aiven_kafka_topic.Table_topic.Topic_name,
            SchemaSql = @"      `+""`cpu`""+` INT,
      `+""`node`""+` INT,
      `+""`occurred_at`""+` TIMESTAMP(3) METADATA FROM 'timestamp',
      WATERMARK FOR `+""`occurred_at`""+` AS `+""`occurred_at`""+` - INTERVAL '5' SECOND
",
        });
    }

}
Go

package main

import (
	"github.com/pulumi/pulumi-aiven/sdk/v5/go/aiven"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := aiven.NewFlinkJobTable(ctx, "table", &aiven.FlinkJobTableArgs{
			Project:       pulumi.Any(data.Aiven_project.Pr1.Project),
			ServiceName:   pulumi.Any(aiven_flink.Flink.Service_name),
			TableName:     pulumi.String("<TABLE_NAME>"),
			IntegrationId: pulumi.Any(aiven_service_integration.Flink_kafka.Service_id),
			JdbcTable:     pulumi.String("<JDBC_TABLE_NAME>"),
			KafkaTopic:    pulumi.Any(aiven_kafka_topic.Table_topic.Topic_name),
			SchemaSql:     pulumi.String("      `cpu` INT,\n      `node` INT,\n      `occurred_at` TIMESTAMP(3) METADATA FROM 'timestamp',\n      WATERMARK FOR `occurred_at` AS `occurred_at` - INTERVAL '5' SECOND\n"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
Java

package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.aiven.FlinkJobTable;
import com.pulumi.aiven.FlinkJobTableArgs;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var table = new FlinkJobTable("table", FlinkJobTableArgs.builder()        
            .project(data.aiven_project().pr1().project())
            .serviceName(aiven_flink.flink().service_name())
            .tableName("<TABLE_NAME>")
            .integrationId(aiven_service_integration.flink_kafka().service_id())
            .jdbcTable("<JDBC_TABLE_NAME>")
            .kafkaTopic(aiven_kafka_topic.table_topic().topic_name())
            .schemaSql("""
      `+"`cpu`"+` INT,
      `+"`node`"+` INT,
      `+"`occurred_at`"+` TIMESTAMP(3) METADATA FROM 'timestamp',
      WATERMARK FOR `+"`occurred_at`"+` AS `+"`occurred_at`"+` - INTERVAL '5' SECOND
            """)
            .build());

    }
}
Python

import pulumi
import pulumi_aiven as aiven

table = aiven.FlinkJobTable("table",
    project=data["aiven_project"]["pr1"]["project"],
    service_name=aiven_flink["flink"]["service_name"],
    table_name="<TABLE_NAME>",
    integration_id=aiven_service_integration["flink_kafka"]["service_id"],
    jdbc_table="<JDBC_TABLE_NAME>",
    kafka_topic=aiven_kafka_topic["table_topic"]["topic_name"],
    schema_sql="""      `+"`cpu`"+` INT,
      `+"`node`"+` INT,
      `+"`occurred_at`"+` TIMESTAMP(3) METADATA FROM 'timestamp',
      WATERMARK FOR `+"`occurred_at`"+` AS `+"`occurred_at`"+` - INTERVAL '5' SECOND
""")
TypeScript

import * as pulumi from "@pulumi/pulumi";
import * as aiven from "@pulumi/aiven";

const table = new aiven.FlinkJobTable("table", {
    project: data.aiven_project.pr1.project,
    serviceName: aiven_flink.flink.service_name,
    tableName: "<TABLE_NAME>",
    integrationId: aiven_service_integration.flink_kafka.service_id,
    jdbcTable: "<JDBC_TABLE_NAME>",
    kafkaTopic: aiven_kafka_topic.table_topic.topic_name,
    schemaSql: `      \`cpu\` INT,
      \`node\` INT,
      \`occurred_at\` TIMESTAMP(3) METADATA FROM 'timestamp',
      WATERMARK FOR \`occurred_at\` AS \`occurred_at\` - INTERVAL '5' SECOND
`,
});
YAML

resources:
  table:
    type: aiven:FlinkJobTable
    properties:
      project: ${data.aiven_project.pr1.project}
      serviceName: ${aiven_flink.flink.service_name}
      tableName: <TABLE_NAME>
      integrationId: ${aiven_service_integration.flink_kafka.service_id}
      jdbcTable: <JDBC_TABLE_NAME>
      kafkaTopic: ${aiven_kafka_topic.table_topic.topic_name}
      schemaSql: |
        `cpu` INT,
        `node` INT,
        `occurred_at` TIMESTAMP(3) METADATA FROM 'timestamp',
        WATERMARK FOR `occurred_at` AS `occurred_at` - INTERVAL '5' SECOND

Create a FlinkJobTable Resource

new FlinkJobTable(name: string, args: FlinkJobTableArgs, opts?: CustomResourceOptions);
@overload
def FlinkJobTable(resource_name: str,
                  opts: Optional[ResourceOptions] = None,
                  integration_id: Optional[str] = None,
                  jdbc_table: Optional[str] = None,
                  kafka_connector_type: Optional[str] = None,
                  kafka_key_fields: Optional[Sequence[str]] = None,
                  kafka_key_format: Optional[str] = None,
                  kafka_startup_mode: Optional[str] = None,
                  kafka_topic: Optional[str] = None,
                  kafka_value_fields_include: Optional[str] = None,
                  kafka_value_format: Optional[str] = None,
                  like_options: Optional[str] = None,
                  project: Optional[str] = None,
                  schema_sql: Optional[str] = None,
                  service_name: Optional[str] = None,
                  table_name: Optional[str] = None,
                  upsert_kafka: Optional[FlinkJobTableUpsertKafkaArgs] = None)
@overload
def FlinkJobTable(resource_name: str,
                  args: FlinkJobTableArgs,
                  opts: Optional[ResourceOptions] = None)
func NewFlinkJobTable(ctx *Context, name string, args FlinkJobTableArgs, opts ...ResourceOption) (*FlinkJobTable, error)
public FlinkJobTable(string name, FlinkJobTableArgs args, CustomResourceOptions? opts = null)
public FlinkJobTable(String name, FlinkJobTableArgs args)
public FlinkJobTable(String name, FlinkJobTableArgs args, CustomResourceOptions options)
type: aiven:FlinkJobTable
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

name string
The unique name of the resource.
args FlinkJobTableArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name str
The unique name of the resource.
args FlinkJobTableArgs
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.
ctx Context
Context object for the current deployment.
name string
The unique name of the resource.
args FlinkJobTableArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name string
The unique name of the resource.
args FlinkJobTableArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
name String
The unique name of the resource.
args FlinkJobTableArgs
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.
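
Because nearly every input on this resource forces recreation when changed, it can be worth passing resource options alongside the arguments. Below is a minimal TypeScript sketch, not an authoritative pattern: the project, service, and integration ID values are hypothetical placeholders.

import * as aiven from "@pulumi/aiven";

// All placeholder values below are hypothetical; in a real program they
// would be references to other Aiven resources.
const guardedTable = new aiven.FlinkJobTable("table", {
    project: "my-project",
    serviceName: "my-flink",
    integrationId: "00000000-0000-0000-0000-000000000000",
    tableName: "cpu_measurements",
    kafkaTopic: "cpu-measurements",
    schemaSql: "`cpu` INT, `node` INT",
}, {
    // Changing any immutable input would delete and recreate the table,
    // so guard it against accidental replacement.
    protect: true,
});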

FlinkJobTable Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

The FlinkJobTable resource accepts the following input properties:

IntegrationId string

The ID of the service integration used with this table. It must have the service integration type flink. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

Project string

Identifies the project this resource belongs to. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

SchemaSql string

The SQL statement used to create the table. This property cannot be changed; doing so forces recreation of the resource.

ServiceName string

Specifies the name of the service that this resource belongs to. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

TableName string

Specifies the name of the table. This property cannot be changed; doing so forces recreation of the resource.

JdbcTable string

Name of the JDBC table to connect to this table. Valid if the service integration ID refers to a MySQL or PostgreSQL service. This property cannot be changed; doing so forces recreation of the resource.

KafkaConnectorType string

When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation. The possible values are kafka and upsert-kafka. This property cannot be changed; doing so forces recreation of the resource.

KafkaKeyFields List<string>

Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed; doing so forces recreation of the resource.

KafkaKeyFormat string

Kafka key format. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json, and json. This property cannot be changed; doing so forces recreation of the resource.

KafkaStartupMode string

Startup mode. The possible values are earliest-offset, latest-offset, group-offsets, and timestamp. This property cannot be changed; doing so forces recreation of the resource.

KafkaTopic string

Name of the Kafka topic to connect to this table. Valid if the service integration ID refers to a Kafka service. This property cannot be changed; doing so forces recreation of the resource.

KafkaValueFieldsInclude string

Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value; this is the default for upsert Kafka connectors. The possible values are ALL and EXCEPT_KEY. This property cannot be changed; doing so forces recreation of the resource.

KafkaValueFormat string

Kafka value format. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json, and json. This property cannot be changed; doing so forces recreation of the resource.

LikeOptions string

LIKE statement for table creation. This property cannot be changed; doing so forces recreation of the resource.

UpsertKafka FlinkJobTableUpsertKafkaArgs

Kafka upsert connector configuration.

IntegrationId string

The ID of the service integration used with this table. It must have the service integration type flink. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

Project string

Identifies the project this resource belongs to. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

SchemaSql string

The SQL statement used to create the table. This property cannot be changed; doing so forces recreation of the resource.

ServiceName string

Specifies the name of the service that this resource belongs to. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

TableName string

Specifies the name of the table. This property cannot be changed; doing so forces recreation of the resource.

JdbcTable string

Name of the JDBC table to connect to this table. Valid if the service integration ID refers to a MySQL or PostgreSQL service. This property cannot be changed; doing so forces recreation of the resource.

KafkaConnectorType string

When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation. The possible values are kafka and upsert-kafka. This property cannot be changed; doing so forces recreation of the resource.

KafkaKeyFields []string

Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed; doing so forces recreation of the resource.

KafkaKeyFormat string

Kafka key format. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json, and json. This property cannot be changed; doing so forces recreation of the resource.

KafkaStartupMode string

Startup mode. The possible values are earliest-offset, latest-offset, group-offsets, and timestamp. This property cannot be changed; doing so forces recreation of the resource.

KafkaTopic string

Name of the Kafka topic to connect to this table. Valid if the service integration ID refers to a Kafka service. This property cannot be changed; doing so forces recreation of the resource.

KafkaValueFieldsInclude string

Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value; this is the default for upsert Kafka connectors. The possible values are ALL and EXCEPT_KEY. This property cannot be changed; doing so forces recreation of the resource.

KafkaValueFormat string

Kafka value format. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json, and json. This property cannot be changed; doing so forces recreation of the resource.

LikeOptions string

LIKE statement for table creation. This property cannot be changed; doing so forces recreation of the resource.

UpsertKafka FlinkJobTableUpsertKafkaArgs

Kafka upsert connector configuration.

integrationId String

The ID of the service integration used with this table. It must have the service integration type flink. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

project String

Identifies the project this resource belongs to. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

schemaSql String

The SQL statement used to create the table. This property cannot be changed; doing so forces recreation of the resource.

serviceName String

Specifies the name of the service that this resource belongs to. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

tableName String

Specifies the name of the table. This property cannot be changed; doing so forces recreation of the resource.

jdbcTable String

Name of the JDBC table to connect to this table. Valid if the service integration ID refers to a MySQL or PostgreSQL service. This property cannot be changed; doing so forces recreation of the resource.

kafkaConnectorType String

When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation. The possible values are kafka and upsert-kafka. This property cannot be changed; doing so forces recreation of the resource.

kafkaKeyFields List<String>

Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed; doing so forces recreation of the resource.

kafkaKeyFormat String

Kafka key format. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json, and json. This property cannot be changed; doing so forces recreation of the resource.

kafkaStartupMode String

Startup mode. The possible values are earliest-offset, latest-offset, group-offsets, and timestamp. This property cannot be changed; doing so forces recreation of the resource.

kafkaTopic String

Name of the Kafka topic to connect to this table. Valid if the service integration ID refers to a Kafka service. This property cannot be changed; doing so forces recreation of the resource.

kafkaValueFieldsInclude String

Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value; this is the default for upsert Kafka connectors. The possible values are ALL and EXCEPT_KEY. This property cannot be changed; doing so forces recreation of the resource.

kafkaValueFormat String

Kafka value format. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json, and json. This property cannot be changed; doing so forces recreation of the resource.

likeOptions String

LIKE statement for table creation. This property cannot be changed; doing so forces recreation of the resource.

upsertKafka FlinkJobTableUpsertKafkaArgs

Kafka upsert connector configuration.

integrationId string

The ID of the service integration used with this table. It must have the service integration type flink. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

project string

Identifies the project this resource belongs to. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

schemaSql string

The SQL statement used to create the table. This property cannot be changed; doing so forces recreation of the resource.

serviceName string

Specifies the name of the service that this resource belongs to. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

tableName string

Specifies the name of the table. This property cannot be changed; doing so forces recreation of the resource.

jdbcTable string

Name of the JDBC table to connect to this table. Valid if the service integration ID refers to a MySQL or PostgreSQL service. This property cannot be changed; doing so forces recreation of the resource.

kafkaConnectorType string

When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation. The possible values are kafka and upsert-kafka. This property cannot be changed; doing so forces recreation of the resource.

kafkaKeyFields string[]

Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed; doing so forces recreation of the resource.

kafkaKeyFormat string

Kafka key format. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json, and json. This property cannot be changed; doing so forces recreation of the resource.

kafkaStartupMode string

Startup mode. The possible values are earliest-offset, latest-offset, group-offsets, and timestamp. This property cannot be changed; doing so forces recreation of the resource.

kafkaTopic string

Name of the Kafka topic to connect to this table. Valid if the service integration ID refers to a Kafka service. This property cannot be changed; doing so forces recreation of the resource.

kafkaValueFieldsInclude string

Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value; this is the default for upsert Kafka connectors. The possible values are ALL and EXCEPT_KEY. This property cannot be changed; doing so forces recreation of the resource.

kafkaValueFormat string

Kafka value format. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json, and json. This property cannot be changed; doing so forces recreation of the resource.

likeOptions string

LIKE statement for table creation. This property cannot be changed; doing so forces recreation of the resource.

upsertKafka FlinkJobTableUpsertKafkaArgs

Kafka upsert connector configuration.

integration_id str

The ID of the service integration used with this table. It must have the service integration type flink. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

project str

Identifies the project this resource belongs to. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

schema_sql str

The SQL statement used to create the table. This property cannot be changed; doing so forces recreation of the resource.

service_name str

Specifies the name of the service that this resource belongs to. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

table_name str

Specifies the name of the table. This property cannot be changed; doing so forces recreation of the resource.

jdbc_table str

Name of the JDBC table to connect to this table. Valid if the service integration ID refers to a MySQL or PostgreSQL service. This property cannot be changed; doing so forces recreation of the resource.

kafka_connector_type str

When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation. The possible values are kafka and upsert-kafka. This property cannot be changed; doing so forces recreation of the resource.

kafka_key_fields Sequence[str]

Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed; doing so forces recreation of the resource.

kafka_key_format str

Kafka key format. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json, and json. This property cannot be changed; doing so forces recreation of the resource.

kafka_startup_mode str

Startup mode. The possible values are earliest-offset, latest-offset, group-offsets, and timestamp. This property cannot be changed; doing so forces recreation of the resource.

kafka_topic str

Name of the Kafka topic to connect to this table. Valid if the service integration ID refers to a Kafka service. This property cannot be changed; doing so forces recreation of the resource.

kafka_value_fields_include str

Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value; this is the default for upsert Kafka connectors. The possible values are ALL and EXCEPT_KEY. This property cannot be changed; doing so forces recreation of the resource.

kafka_value_format str

Kafka value format. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json, and json. This property cannot be changed; doing so forces recreation of the resource.

like_options str

LIKE statement for table creation. This property cannot be changed; doing so forces recreation of the resource.

upsert_kafka FlinkJobTableUpsertKafkaArgs

Kafka upsert connector configuration.

integrationId String

The ID of the service integration used with this table. It must have the service integration type flink. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

project String

Identifies the project this resource belongs to. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

schemaSql String

The SQL statement used to create the table. This property cannot be changed; doing so forces recreation of the resource.

serviceName String

Specifies the name of the service that this resource belongs to. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

tableName String

Specifies the name of the table. This property cannot be changed; doing so forces recreation of the resource.

jdbcTable String

Name of the JDBC table to connect to this table. Valid if the service integration ID refers to a MySQL or PostgreSQL service. This property cannot be changed; doing so forces recreation of the resource.

kafkaConnectorType String

When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation. The possible values are kafka and upsert-kafka. This property cannot be changed; doing so forces recreation of the resource.

kafkaKeyFields List<String>

Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed; doing so forces recreation of the resource.

kafkaKeyFormat String

Kafka key format. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json, and json. This property cannot be changed; doing so forces recreation of the resource.

kafkaStartupMode String

Startup mode. The possible values are earliest-offset, latest-offset, group-offsets, and timestamp. This property cannot be changed; doing so forces recreation of the resource.

kafkaTopic String

Name of the Kafka topic to connect to this table. Valid if the service integration ID refers to a Kafka service. This property cannot be changed; doing so forces recreation of the resource.

kafkaValueFieldsInclude String

Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value; this is the default for upsert Kafka connectors. The possible values are ALL and EXCEPT_KEY. This property cannot be changed; doing so forces recreation of the resource.

kafkaValueFormat String

Kafka value format. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json, and json. This property cannot be changed; doing so forces recreation of the resource.

likeOptions String

LIKE statement for table creation. This property cannot be changed; doing so forces recreation of the resource.

upsertKafka Property Map

Kafka upsert connector configuration.
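
As a concrete illustration of the connector-related inputs above, the following TypeScript sketch configures an upsert Kafka table. It is a hedged example rather than an authoritative one: all names and the integration ID are hypothetical, and the primary key in the schema reflects the upsert-kafka connector's requirement as described in the Apache Flink documentation.

import * as aiven from "@pulumi/aiven";

// Hypothetical project, service, topic, and integration ID.
const upsertTable = new aiven.FlinkJobTable("upsert-table", {
    project: "my-project",
    serviceName: "my-flink",
    integrationId: "00000000-0000-0000-0000-000000000000",
    tableName: "node_latest_cpu",
    kafkaConnectorType: "upsert-kafka",
    // The upsert connector identifies rows to update by primary key.
    schemaSql: "`node` INT, `cpu` INT, PRIMARY KEY (`node`) NOT ENFORCED",
    upsertKafka: {
        topic: "node-latest-cpu",
        keyFormat: "json",
        valueFormat: "json",
    },
});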

Outputs

All input properties are implicitly available as output properties. Additionally, the FlinkJobTable resource produces the following output properties:

Id string

The provider-assigned unique ID for this managed resource.

TableId string

The table ID of the Flink table in the Flink service.

Id string

The provider-assigned unique ID for this managed resource.

TableId string

The table ID of the Flink table in the Flink service.

id String

The provider-assigned unique ID for this managed resource.

tableId String

The table ID of the Flink table in the Flink service.

id string

The provider-assigned unique ID for this managed resource.

tableId string

The table ID of the Flink table in the Flink service.

id str

The provider-assigned unique ID for this managed resource.

table_id str

The table ID of the Flink table in the Flink service.

id String

The provider-assigned unique ID for this managed resource.

tableId String

The table ID of the Flink table in the Flink service.
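
Like other Pulumi outputs, these can be read from the resource instance and exported from the stack. A short TypeScript sketch, reusing the table resource from the example usage above:

// Export the provider-assigned resource ID and the Flink-side table ID.
export const resourceId = table.id;
export const flinkTableId = table.tableId;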

Look up an Existing FlinkJobTable Resource

Get an existing FlinkJobTable resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: FlinkJobTableState, opts?: CustomResourceOptions): FlinkJobTable
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        integration_id: Optional[str] = None,
        jdbc_table: Optional[str] = None,
        kafka_connector_type: Optional[str] = None,
        kafka_key_fields: Optional[Sequence[str]] = None,
        kafka_key_format: Optional[str] = None,
        kafka_startup_mode: Optional[str] = None,
        kafka_topic: Optional[str] = None,
        kafka_value_fields_include: Optional[str] = None,
        kafka_value_format: Optional[str] = None,
        like_options: Optional[str] = None,
        project: Optional[str] = None,
        schema_sql: Optional[str] = None,
        service_name: Optional[str] = None,
        table_id: Optional[str] = None,
        table_name: Optional[str] = None,
        upsert_kafka: Optional[FlinkJobTableUpsertKafkaArgs] = None) -> FlinkJobTable
func GetFlinkJobTable(ctx *Context, name string, id IDInput, state *FlinkJobTableState, opts ...ResourceOption) (*FlinkJobTable, error)
public static FlinkJobTable Get(string name, Input<string> id, FlinkJobTableState? state, CustomResourceOptions? opts = null)
public static FlinkJobTable get(String name, Output<String> id, FlinkJobTableState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
The following state arguments are supported:
IntegrationId string

The ID of the service integration used with this table. It must have the service integration type flink. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

JdbcTable string

Name of the JDBC table to connect to this table. Valid if the service integration ID refers to a MySQL or PostgreSQL service. This property cannot be changed; doing so forces recreation of the resource.

KafkaConnectorType string

When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation. The possible values are kafka and upsert-kafka. This property cannot be changed; doing so forces recreation of the resource.

KafkaKeyFields List<string>

Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed; doing so forces recreation of the resource.

KafkaKeyFormat string

Kafka key format. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json, and json. This property cannot be changed; doing so forces recreation of the resource.

KafkaStartupMode string

Startup mode. The possible values are earliest-offset, latest-offset, group-offsets, and timestamp. This property cannot be changed; doing so forces recreation of the resource.

KafkaTopic string

Name of the Kafka topic to connect to this table. Valid if the service integration ID refers to a Kafka service. This property cannot be changed; doing so forces recreation of the resource.

KafkaValueFieldsInclude string

Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value; this is the default for upsert Kafka connectors. The possible values are ALL and EXCEPT_KEY. This property cannot be changed; doing so forces recreation of the resource.

KafkaValueFormat string

Kafka value format. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json, and json. This property cannot be changed; doing so forces recreation of the resource.

LikeOptions string

LIKE statement for table creation. This property cannot be changed; doing so forces recreation of the resource.

Project string

Identifies the project this resource belongs to. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

SchemaSql string

The SQL statement used to create the table. This property cannot be changed; doing so forces recreation of the resource.

ServiceName string

Specifies the name of the service that this resource belongs to. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

TableId string

The table ID of the Flink table in the Flink service.

TableName string

Specifies the name of the table. This property cannot be changed; doing so forces recreation of the resource.

UpsertKafka FlinkJobTableUpsertKafkaArgs

Kafka upsert connector configuration.

IntegrationId string

The ID of the service integration used with this table. It must have the service integration type flink. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

JdbcTable string

Name of the JDBC table to connect to this table. Valid if the service integration ID refers to a MySQL or PostgreSQL service. This property cannot be changed; doing so forces recreation of the resource.

KafkaConnectorType string

When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation. The possible values are kafka and upsert-kafka. This property cannot be changed; doing so forces recreation of the resource.

KafkaKeyFields []string

Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed; doing so forces recreation of the resource.

KafkaKeyFormat string

Kafka key format. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json, and json. This property cannot be changed; doing so forces recreation of the resource.

KafkaStartupMode string

Startup mode. The possible values are earliest-offset, latest-offset, group-offsets, and timestamp. This property cannot be changed; doing so forces recreation of the resource.

KafkaTopic string

Name of the Kafka topic to connect to this table. Valid if the service integration ID refers to a Kafka service. This property cannot be changed; doing so forces recreation of the resource.

KafkaValueFieldsInclude string

Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value; this is the default for upsert Kafka connectors. The possible values are ALL and EXCEPT_KEY. This property cannot be changed; doing so forces recreation of the resource.

KafkaValueFormat string

Kafka value format. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json, and json. This property cannot be changed; doing so forces recreation of the resource.

LikeOptions string

LIKE statement for table creation. This property cannot be changed; doing so forces recreation of the resource.

Project string

Identifies the project this resource belongs to. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

SchemaSql string

The SQL statement used to create the table. This property cannot be changed; doing so forces recreation of the resource.

ServiceName string

Specifies the name of the service that this resource belongs to. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

TableId string

The table ID of the Flink table in the Flink service.

TableName string

Specifies the name of the table. This property cannot be changed; doing so forces recreation of the resource.

UpsertKafka FlinkJobTableUpsertKafkaArgs

Kafka upsert connector configuration.

integrationId String

The ID of the service integration used with this table. It must have the service integration type flink. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

jdbcTable String

Name of the JDBC table to connect to this table. Valid if the service integration ID refers to a MySQL or PostgreSQL service. This property cannot be changed; doing so forces recreation of the resource.

kafkaConnectorType String

When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation. The possible values are kafka and upsert-kafka. This property cannot be changed; doing so forces recreation of the resource.

kafkaKeyFields List<String>

Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed; doing so forces recreation of the resource.

kafkaKeyFormat String

Kafka key format. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json, and json. This property cannot be changed; doing so forces recreation of the resource.

kafkaStartupMode String

Startup mode. The possible values are earliest-offset, latest-offset, group-offsets, and timestamp. This property cannot be changed; doing so forces recreation of the resource.

kafkaTopic String

Name of the Kafka topic to connect to this table. Valid if the service integration ID refers to a Kafka service. This property cannot be changed; doing so forces recreation of the resource.

kafkaValueFieldsInclude String

Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value; this is the default for upsert Kafka connectors. The possible values are ALL and EXCEPT_KEY. This property cannot be changed; doing so forces recreation of the resource.

kafkaValueFormat String

Kafka value format. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json, and json. This property cannot be changed; doing so forces recreation of the resource.

likeOptions String

LIKE statement for table creation. This property cannot be changed; doing so forces recreation of the resource.

project String

Identifies the project this resource belongs to. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

schemaSql String

The SQL statement used to create the table. This property cannot be changed; doing so forces recreation of the resource.

serviceName String

Specifies the name of the service that this resource belongs to. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

tableId String

The table ID of the Flink table in the Flink service.

tableName String

Specifies the name of the table. This property cannot be changed; doing so forces recreation of the resource.

upsertKafka FlinkJobTableUpsertKafkaArgs

Kafka upsert connector configuration.

integrationId string

The ID of the service integration used with this table. It must have the service integration type flink. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

jdbcTable string

Name of the JDBC table to connect to this table. Valid if the service integration ID refers to a MySQL or PostgreSQL service. This property cannot be changed; doing so forces recreation of the resource.

kafkaConnectorType string

When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation. The possible values are kafka and upsert-kafka. This property cannot be changed; doing so forces recreation of the resource.

kafkaKeyFields string[]

Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed; doing so forces recreation of the resource.

kafkaKeyFormat string

Kafka key format. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json, and json. This property cannot be changed; doing so forces recreation of the resource.

kafkaStartupMode string

Startup mode. The possible values are earliest-offset, latest-offset, group-offsets, and timestamp. This property cannot be changed; doing so forces recreation of the resource.

kafkaTopic string

Name of the Kafka topic to connect to this table. Valid if the service integration ID refers to a Kafka service. This property cannot be changed; doing so forces recreation of the resource.

kafkaValueFieldsInclude string

Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value; this is the default for upsert Kafka connectors. The possible values are ALL and EXCEPT_KEY. This property cannot be changed; doing so forces recreation of the resource.

kafkaValueFormat string

Kafka value format. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json, and json. This property cannot be changed; doing so forces recreation of the resource.

likeOptions string

LIKE statement for table creation. This property cannot be changed; doing so forces recreation of the resource.

project string

Identifies the project this resource belongs to. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

schemaSql string

The SQL statement used to create the table. This property cannot be changed; doing so forces recreation of the resource.

serviceName string

Specifies the name of the service that this resource belongs to. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

tableId string

The table ID of the Flink table in the Flink service.

tableName string

Specifies the name of the table. This property cannot be changed; doing so forces recreation of the resource.

upsertKafka FlinkJobTableUpsertKafkaArgs

Kafka upsert connector configuration.

integration_id str

The ID of the service integration used with this table. It must have the service integration type flink. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

jdbc_table str

Name of the JDBC table to connect to this table. Valid if the service integration ID refers to a MySQL or PostgreSQL service. This property cannot be changed; doing so forces recreation of the resource.

kafka_connector_type str

When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation. The possible values are kafka and upsert-kafka. This property cannot be changed; doing so forces recreation of the resource.

kafka_key_fields Sequence[str]

Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed; doing so forces recreation of the resource.

kafka_key_format str

Kafka key format. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json, and json. This property cannot be changed; doing so forces recreation of the resource.

kafka_startup_mode str

Startup mode. The possible values are earliest-offset, latest-offset, group-offsets, and timestamp. This property cannot be changed; doing so forces recreation of the resource.

kafka_topic str

Name of the Kafka topic to connect to this table. Valid if the service integration ID refers to a Kafka service. This property cannot be changed; doing so forces recreation of the resource.

kafka_value_fields_include str

Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value; this is the default for upsert Kafka connectors. The possible values are ALL and EXCEPT_KEY. This property cannot be changed; doing so forces recreation of the resource.

kafka_value_format str

Kafka value format. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json, and json. This property cannot be changed; doing so forces recreation of the resource.

like_options str

LIKE statement for table creation. This property cannot be changed; doing so forces recreation of the resource.

project str

Identifies the project this resource belongs to. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

schema_sql str

The SQL statement used to create the table. This property cannot be changed; doing so forces recreation of the resource.

service_name str

Specifies the name of the service that this resource belongs to. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

table_id str

The table ID of the Flink table in the Flink service.

table_name str

Specifies the name of the table. This property cannot be changed; doing so forces recreation of the resource.

upsert_kafka FlinkJobTableUpsertKafkaArgs

Kafka upsert connector configuration.

integrationId String

The ID of the service integration used with this table. It must have the service integration type flink. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

jdbcTable String

Name of the JDBC table to connect to this table. Valid if the service integration ID refers to a MySQL or PostgreSQL service. This property cannot be changed; doing so forces recreation of the resource.

kafkaConnectorType String

When used as a source, upsert Kafka connectors update values that use an existing key and delete values that are null. For sinks, the connector correspondingly writes update or delete messages in a compacted topic. If no matching key is found, the values are added as new entries. For more information, see the Apache Flink documentation. The possible values are kafka and upsert-kafka. This property cannot be changed; doing so forces recreation of the resource.

kafkaKeyFields List<String>

Defines an explicit list of physical columns from the table schema that configure the data type for the key format. This property cannot be changed; doing so forces recreation of the resource.

kafkaKeyFormat String

Kafka key format. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json, and json. This property cannot be changed; doing so forces recreation of the resource.

kafkaStartupMode String

Startup mode. The possible values are earliest-offset, latest-offset, group-offsets, and timestamp. This property cannot be changed; doing so forces recreation of the resource.

kafkaTopic String

Name of the Kafka topic to connect to this table. Valid if the service integration ID refers to a Kafka service. This property cannot be changed; doing so forces recreation of the resource.

kafkaValueFieldsInclude String

Controls how key columns are handled in the message value. Select ALL to include the physical columns of the table schema in the message value. Select EXCEPT_KEY to exclude the physical columns of the table schema from the message value; this is the default for upsert Kafka connectors. The possible values are ALL and EXCEPT_KEY. This property cannot be changed; doing so forces recreation of the resource.

kafkaValueFormat String

Kafka value format. The possible values are avro, avro-confluent, debezium-avro-confluent, debezium-json, and json. This property cannot be changed; doing so forces recreation of the resource.

likeOptions String

LIKE statement for table creation. This property cannot be changed; doing so forces recreation of the resource.

project String

Identifies the project this resource belongs to. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

schemaSql String

The SQL statement used to create the table. This property cannot be changed; doing so forces recreation of the resource.

serviceName String

Specifies the name of the service that this resource belongs to. To set up proper dependencies, refer to this variable as a reference. This property cannot be changed; doing so forces recreation of the resource.

tableId String

The table ID of the Flink table in the Flink service.

tableName String

Specifies the name of the table. This property cannot be changed; doing so forces recreation of the resource.

upsertKafka Property Map

Kafka upsert connector configuration.
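
In TypeScript, the lookup uses the static get method shown above. The sketch below is a minimal illustration; the ID string is a hypothetical placeholder assumed to follow the same project/service_name/table_id form used by pulumi import (see the Import section below).

import * as aiven from "@pulumi/aiven";

// Looking up an existing table does not create anything new; the returned
// object exposes the same output properties as a constructed resource.
const existing = aiven.FlinkJobTable.get(
    "existing-table",
    "my-project/my-flink/00000000-0000-0000-0000-000000000000",
);

export const existingSchema = existing.schemaSql;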

Supporting Types

FlinkJobTableUpsertKafka

KeyFields List<string>
KeyFormat string
ScanStartupMode string
Topic string
ValueFieldsInclude string
ValueFormat string
keyFields List<String>
keyFormat String
scanStartupMode String
topic String
valueFieldsInclude String
valueFormat String
keyFields List<String>
keyFormat String
scanStartupMode String
topic String
valueFieldsInclude String
valueFormat String

Import

A Flink table can be imported by joining the project name, service name, and table ID with slashes:

 $ pulumi import aiven:index/flinkJobTable:FlinkJobTable table project/service_name/table_id

Package Details

Repository
https://github.com/pulumi/pulumi-aiven
License
Apache-2.0
Notes
This Pulumi package is based on the aiven Terraform Provider.