Create FeatureEngineeringKafkaConfig Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new FeatureEngineeringKafkaConfig(name: string, args: FeatureEngineeringKafkaConfigArgs, opts?: CustomResourceOptions);
@overload
def FeatureEngineeringKafkaConfig(resource_name: str,
args: FeatureEngineeringKafkaConfigArgs,
opts: Optional[ResourceOptions] = None)
@overload
def FeatureEngineeringKafkaConfig(resource_name: str,
opts: Optional[ResourceOptions] = None,
auth_config: Optional[FeatureEngineeringKafkaConfigAuthConfigArgs] = None,
bootstrap_servers: Optional[str] = None,
subscription_mode: Optional[FeatureEngineeringKafkaConfigSubscriptionModeArgs] = None,
extra_options: Optional[Mapping[str, str]] = None,
key_schema: Optional[FeatureEngineeringKafkaConfigKeySchemaArgs] = None,
value_schema: Optional[FeatureEngineeringKafkaConfigValueSchemaArgs] = None)
func NewFeatureEngineeringKafkaConfig(ctx *Context, name string, args FeatureEngineeringKafkaConfigArgs, opts ...ResourceOption) (*FeatureEngineeringKafkaConfig, error)
public FeatureEngineeringKafkaConfig(string name, FeatureEngineeringKafkaConfigArgs args, CustomResourceOptions? opts = null)
public FeatureEngineeringKafkaConfig(String name, FeatureEngineeringKafkaConfigArgs args)
public FeatureEngineeringKafkaConfig(String name, FeatureEngineeringKafkaConfigArgs args, CustomResourceOptions options)
type: databricks:FeatureEngineeringKafkaConfig
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args FeatureEngineeringKafkaConfigArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args FeatureEngineeringKafkaConfigArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args FeatureEngineeringKafkaConfigArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args FeatureEngineeringKafkaConfigArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args FeatureEngineeringKafkaConfigArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var featureEngineeringKafkaConfigResource = new Databricks.FeatureEngineeringKafkaConfig("featureEngineeringKafkaConfigResource", new()
{
AuthConfig = new Databricks.Inputs.FeatureEngineeringKafkaConfigAuthConfigArgs
{
UcServiceCredentialName = "string",
},
BootstrapServers = "string",
SubscriptionMode = new Databricks.Inputs.FeatureEngineeringKafkaConfigSubscriptionModeArgs
{
Assign = "string",
Subscribe = "string",
SubscribePattern = "string",
},
ExtraOptions =
{
{ "string", "string" },
},
KeySchema = new Databricks.Inputs.FeatureEngineeringKafkaConfigKeySchemaArgs
{
JsonSchema = "string",
},
ValueSchema = new Databricks.Inputs.FeatureEngineeringKafkaConfigValueSchemaArgs
{
JsonSchema = "string",
},
});
example, err := databricks.NewFeatureEngineeringKafkaConfig(ctx, "featureEngineeringKafkaConfigResource", &databricks.FeatureEngineeringKafkaConfigArgs{
AuthConfig: &databricks.FeatureEngineeringKafkaConfigAuthConfigArgs{
UcServiceCredentialName: pulumi.String("string"),
},
BootstrapServers: pulumi.String("string"),
SubscriptionMode: &databricks.FeatureEngineeringKafkaConfigSubscriptionModeArgs{
Assign: pulumi.String("string"),
Subscribe: pulumi.String("string"),
SubscribePattern: pulumi.String("string"),
},
ExtraOptions: pulumi.StringMap{
"string": pulumi.String("string"),
},
KeySchema: &databricks.FeatureEngineeringKafkaConfigKeySchemaArgs{
JsonSchema: pulumi.String("string"),
},
ValueSchema: &databricks.FeatureEngineeringKafkaConfigValueSchemaArgs{
JsonSchema: pulumi.String("string"),
},
})
var featureEngineeringKafkaConfigResource = new FeatureEngineeringKafkaConfig("featureEngineeringKafkaConfigResource", FeatureEngineeringKafkaConfigArgs.builder()
.authConfig(FeatureEngineeringKafkaConfigAuthConfigArgs.builder()
.ucServiceCredentialName("string")
.build())
.bootstrapServers("string")
.subscriptionMode(FeatureEngineeringKafkaConfigSubscriptionModeArgs.builder()
.assign("string")
.subscribe("string")
.subscribePattern("string")
.build())
.extraOptions(Map.of("string", "string"))
.keySchema(FeatureEngineeringKafkaConfigKeySchemaArgs.builder()
.jsonSchema("string")
.build())
.valueSchema(FeatureEngineeringKafkaConfigValueSchemaArgs.builder()
.jsonSchema("string")
.build())
.build());
feature_engineering_kafka_config_resource = databricks.FeatureEngineeringKafkaConfig("featureEngineeringKafkaConfigResource",
auth_config={
"uc_service_credential_name": "string",
},
bootstrap_servers="string",
subscription_mode={
"assign": "string",
"subscribe": "string",
"subscribe_pattern": "string",
},
extra_options={
"string": "string",
},
key_schema={
"json_schema": "string",
},
value_schema={
"json_schema": "string",
})
const featureEngineeringKafkaConfigResource = new databricks.FeatureEngineeringKafkaConfig("featureEngineeringKafkaConfigResource", {
authConfig: {
ucServiceCredentialName: "string",
},
bootstrapServers: "string",
subscriptionMode: {
assign: "string",
subscribe: "string",
subscribePattern: "string",
},
extraOptions: {
string: "string",
},
keySchema: {
jsonSchema: "string",
},
valueSchema: {
jsonSchema: "string",
},
});
type: databricks:FeatureEngineeringKafkaConfig
properties:
authConfig:
ucServiceCredentialName: string
bootstrapServers: string
extraOptions:
string: string
keySchema:
jsonSchema: string
subscriptionMode:
assign: string
subscribe: string
subscribePattern: string
valueSchema:
jsonSchema: string
FeatureEngineeringKafkaConfig Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The FeatureEngineeringKafkaConfig resource accepts the following input properties:
- Auth
Config FeatureEngineering Kafka Config Auth Config - Authentication configuration for connection to topics
- Bootstrap
Servers string - A comma-separated list of host/port pairs pointing to Kafka cluster
- Subscription
Mode FeatureEngineering Kafka Config Subscription Mode - Options to configure which Kafka topics to pull data from
- Extra
Options Dictionary<string, string> - Catch-all for miscellaneous options. Keys should be source options or Kafka consumer options (kafka.*)
- Key
Schema FeatureEngineering Kafka Config Key Schema - Schema configuration for extracting message keys from topics. At least one of key_schema and value_schema must be provided
- Value
Schema FeatureEngineering Kafka Config Value Schema - Schema configuration for extracting message values from topics. At least one of key_schema and value_schema must be provided
- Auth
Config FeatureEngineering Kafka Config Auth Config Args - Authentication configuration for connection to topics
- Bootstrap
Servers string - A comma-separated list of host/port pairs pointing to Kafka cluster
- Subscription
Mode FeatureEngineering Kafka Config Subscription Mode Args - Options to configure which Kafka topics to pull data from
- Extra
Options map[string]string - Catch-all for miscellaneous options. Keys should be source options or Kafka consumer options (kafka.*)
- Key
Schema FeatureEngineering Kafka Config Key Schema Args - Schema configuration for extracting message keys from topics. At least one of key_schema and value_schema must be provided
- Value
Schema FeatureEngineering Kafka Config Value Schema Args - Schema configuration for extracting message values from topics. At least one of key_schema and value_schema must be provided
- auth
Config FeatureEngineering Kafka Config Auth Config - Authentication configuration for connection to topics
- bootstrap
Servers String - A comma-separated list of host/port pairs pointing to Kafka cluster
- subscription
Mode FeatureEngineering Kafka Config Subscription Mode - Options to configure which Kafka topics to pull data from
- extra
Options Map<String,String> - Catch-all for miscellaneous options. Keys should be source options or Kafka consumer options (kafka.*)
- key
Schema FeatureEngineering Kafka Config Key Schema - Schema configuration for extracting message keys from topics. At least one of key_schema and value_schema must be provided
- value
Schema FeatureEngineering Kafka Config Value Schema - Schema configuration for extracting message values from topics. At least one of key_schema and value_schema must be provided
- auth
Config FeatureEngineering Kafka Config Auth Config - Authentication configuration for connection to topics
- bootstrap
Servers string - A comma-separated list of host/port pairs pointing to Kafka cluster
- subscription
Mode FeatureEngineering Kafka Config Subscription Mode - Options to configure which Kafka topics to pull data from
- extra
Options {[key: string]: string} - Catch-all for miscellaneous options. Keys should be source options or Kafka consumer options (kafka.*)
- key
Schema FeatureEngineering Kafka Config Key Schema - Schema configuration for extracting message keys from topics. At least one of key_schema and value_schema must be provided
- value
Schema FeatureEngineering Kafka Config Value Schema - Schema configuration for extracting message values from topics. At least one of key_schema and value_schema must be provided
- auth_
config FeatureEngineering Kafka Config Auth Config Args - Authentication configuration for connection to topics
- bootstrap_
servers str - A comma-separated list of host/port pairs pointing to Kafka cluster
- subscription_
mode FeatureEngineering Kafka Config Subscription Mode Args - Options to configure which Kafka topics to pull data from
- extra_
options Mapping[str, str] - Catch-all for miscellaneous options. Keys should be source options or Kafka consumer options (kafka.*)
- key_
schema FeatureEngineering Kafka Config Key Schema Args - Schema configuration for extracting message keys from topics. At least one of key_schema and value_schema must be provided
- value_
schema FeatureEngineering Kafka Config Value Schema Args - Schema configuration for extracting message values from topics. At least one of key_schema and value_schema must be provided
- auth
Config Property Map - Authentication configuration for connection to topics
- bootstrap
Servers String - A comma-separated list of host/port pairs pointing to Kafka cluster
- subscription
Mode Property Map - Options to configure which Kafka topics to pull data from
- extra
Options Map<String> - Catch-all for miscellaneous options. Keys should be source options or Kafka consumer options (kafka.*)
- key
Schema Property Map - Schema configuration for extracting message keys from topics. At least one of key_schema and value_schema must be provided
- value
Schema Property Map - Schema configuration for extracting message values from topics. At least one of key_schema and value_schema must be provided
Outputs
All input properties are implicitly available as output properties. Additionally, the FeatureEngineeringKafkaConfig resource produces the following output properties:
Look up Existing FeatureEngineeringKafkaConfig Resource
Get an existing FeatureEngineeringKafkaConfig resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: FeatureEngineeringKafkaConfigState, opts?: CustomResourceOptions): FeatureEngineeringKafkaConfig
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
auth_config: Optional[FeatureEngineeringKafkaConfigAuthConfigArgs] = None,
bootstrap_servers: Optional[str] = None,
extra_options: Optional[Mapping[str, str]] = None,
key_schema: Optional[FeatureEngineeringKafkaConfigKeySchemaArgs] = None,
name: Optional[str] = None,
subscription_mode: Optional[FeatureEngineeringKafkaConfigSubscriptionModeArgs] = None,
value_schema: Optional[FeatureEngineeringKafkaConfigValueSchemaArgs] = None) -> FeatureEngineeringKafkaConfig
func GetFeatureEngineeringKafkaConfig(ctx *Context, name string, id IDInput, state *FeatureEngineeringKafkaConfigState, opts ...ResourceOption) (*FeatureEngineeringKafkaConfig, error)
public static FeatureEngineeringKafkaConfig Get(string name, Input<string> id, FeatureEngineeringKafkaConfigState? state, CustomResourceOptions? opts = null)
public static FeatureEngineeringKafkaConfig get(String name, Output<String> id, FeatureEngineeringKafkaConfigState state, CustomResourceOptions options)
resources:
  _:
    type: databricks:FeatureEngineeringKafkaConfig
    get:
      id: ${id}
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- Auth
Config FeatureEngineering Kafka Config Auth Config - Authentication configuration for connection to topics
- Bootstrap
Servers string - A comma-separated list of host/port pairs pointing to Kafka cluster
- Extra
Options Dictionary<string, string> - Catch-all for miscellaneous options. Keys should be source options or Kafka consumer options (kafka.*)
- Key
Schema FeatureEngineering Kafka Config Key Schema - Schema configuration for extracting message keys from topics. At least one of key_schema and value_schema must be provided
- Name string
- (string) - Name that uniquely identifies this Kafka config within the metastore. This will be the identifier used from the Feature object to reference these configs for a feature. Can be distinct from topic name
- Subscription
Mode FeatureEngineering Kafka Config Subscription Mode - Options to configure which Kafka topics to pull data from
- Value
Schema FeatureEngineering Kafka Config Value Schema - Schema configuration for extracting message values from topics. At least one of key_schema and value_schema must be provided
- Auth
Config FeatureEngineering Kafka Config Auth Config Args - Authentication configuration for connection to topics
- Bootstrap
Servers string - A comma-separated list of host/port pairs pointing to Kafka cluster
- Extra
Options map[string]string - Catch-all for miscellaneous options. Keys should be source options or Kafka consumer options (kafka.*)
- Key
Schema FeatureEngineering Kafka Config Key Schema Args - Schema configuration for extracting message keys from topics. At least one of key_schema and value_schema must be provided
- Name string
- (string) - Name that uniquely identifies this Kafka config within the metastore. This will be the identifier used from the Feature object to reference these configs for a feature. Can be distinct from topic name
- Subscription
Mode FeatureEngineering Kafka Config Subscription Mode Args - Options to configure which Kafka topics to pull data from
- Value
Schema FeatureEngineering Kafka Config Value Schema Args - Schema configuration for extracting message values from topics. At least one of key_schema and value_schema must be provided
- auth
Config FeatureEngineering Kafka Config Auth Config - Authentication configuration for connection to topics
- bootstrap
Servers String - A comma-separated list of host/port pairs pointing to Kafka cluster
- extra
Options Map<String,String> - Catch-all for miscellaneous options. Keys should be source options or Kafka consumer options (kafka.*)
- key
Schema FeatureEngineering Kafka Config Key Schema - Schema configuration for extracting message keys from topics. At least one of key_schema and value_schema must be provided
- name String
- (string) - Name that uniquely identifies this Kafka config within the metastore. This will be the identifier used from the Feature object to reference these configs for a feature. Can be distinct from topic name
- subscription
Mode FeatureEngineering Kafka Config Subscription Mode - Options to configure which Kafka topics to pull data from
- value
Schema FeatureEngineering Kafka Config Value Schema - Schema configuration for extracting message values from topics. At least one of key_schema and value_schema must be provided
- auth
Config FeatureEngineering Kafka Config Auth Config - Authentication configuration for connection to topics
- bootstrap
Servers string - A comma-separated list of host/port pairs pointing to Kafka cluster
- extra
Options {[key: string]: string} - Catch-all for miscellaneous options. Keys should be source options or Kafka consumer options (kafka.*)
- key
Schema FeatureEngineering Kafka Config Key Schema - Schema configuration for extracting message keys from topics. At least one of key_schema and value_schema must be provided
- name string
- (string) - Name that uniquely identifies this Kafka config within the metastore. This will be the identifier used from the Feature object to reference these configs for a feature. Can be distinct from topic name
- subscription
Mode FeatureEngineering Kafka Config Subscription Mode - Options to configure which Kafka topics to pull data from
- value
Schema FeatureEngineering Kafka Config Value Schema - Schema configuration for extracting message values from topics. At least one of key_schema and value_schema must be provided
- auth_
config FeatureEngineering Kafka Config Auth Config Args - Authentication configuration for connection to topics
- bootstrap_
servers str - A comma-separated list of host/port pairs pointing to Kafka cluster
- extra_
options Mapping[str, str] - Catch-all for miscellaneous options. Keys should be source options or Kafka consumer options (kafka.*)
- key_
schema FeatureEngineering Kafka Config Key Schema Args - Schema configuration for extracting message keys from topics. At least one of key_schema and value_schema must be provided
- name str
- (string) - Name that uniquely identifies this Kafka config within the metastore. This will be the identifier used from the Feature object to reference these configs for a feature. Can be distinct from topic name
- subscription_
mode FeatureEngineering Kafka Config Subscription Mode Args - Options to configure which Kafka topics to pull data from
- value_
schema FeatureEngineering Kafka Config Value Schema Args - Schema configuration for extracting message values from topics. At least one of key_schema and value_schema must be provided
- auth
Config Property Map - Authentication configuration for connection to topics
- bootstrap
Servers String - A comma-separated list of host/port pairs pointing to Kafka cluster
- extra
Options Map<String> - Catch-all for miscellaneous options. Keys should be source options or Kafka consumer options (kafka.*)
- key
Schema Property Map - Schema configuration for extracting message keys from topics. At least one of key_schema and value_schema must be provided
- name String
- (string) - Name that uniquely identifies this Kafka config within the metastore. This will be the identifier used from the Feature object to reference these configs for a feature. Can be distinct from topic name
- subscription
Mode Property Map - Options to configure which Kafka topics to pull data from
- value
Schema Property Map - Schema configuration for extracting message values from topics. At least one of key_schema and value_schema must be provided
Supporting Types
FeatureEngineeringKafkaConfigAuthConfig, FeatureEngineeringKafkaConfigAuthConfigArgs
- Uc
Service Credential Name string - Name of the Unity Catalog service credential. This value will be set under the option databricks.serviceCredential
- Uc
Service Credential Name string - Name of the Unity Catalog service credential. This value will be set under the option databricks.serviceCredential
- uc
Service Credential Name String - Name of the Unity Catalog service credential. This value will be set under the option databricks.serviceCredential
- uc
Service Credential Name string - Name of the Unity Catalog service credential. This value will be set under the option databricks.serviceCredential
- uc_
service_ credential_ name str - Name of the Unity Catalog service credential. This value will be set under the option databricks.serviceCredential
- uc
Service Credential Name String - Name of the Unity Catalog service credential. This value will be set under the option databricks.serviceCredential
FeatureEngineeringKafkaConfigKeySchema, FeatureEngineeringKafkaConfigKeySchemaArgs
- Json
Schema string - Schema of the JSON object in standard IETF JSON schema format (https://json-schema.org/)
- Json
Schema string - Schema of the JSON object in standard IETF JSON schema format (https://json-schema.org/)
- json
Schema String - Schema of the JSON object in standard IETF JSON schema format (https://json-schema.org/)
- json
Schema string - Schema of the JSON object in standard IETF JSON schema format (https://json-schema.org/)
- json_
schema str - Schema of the JSON object in standard IETF JSON schema format (https://json-schema.org/)
- json
Schema String - Schema of the JSON object in standard IETF JSON schema format (https://json-schema.org/)
FeatureEngineeringKafkaConfigSubscriptionMode, FeatureEngineeringKafkaConfigSubscriptionModeArgs
- Assign string
- A JSON string that contains the specific topic-partitions to consume from. For example, for '{"topicA":[0,1],"topicB":[2,4]}', topicA's 0'th and 1st partitions will be consumed from
- Subscribe string
- A comma-separated list of Kafka topics to read from. For example, 'topicA,topicB,topicC'
- Subscribe
Pattern string - A regular expression matching topics to subscribe to. For example, 'topic.*' will subscribe to all topics starting with 'topic'
- Assign string
- A JSON string that contains the specific topic-partitions to consume from. For example, for '{"topicA":[0,1],"topicB":[2,4]}', topicA's 0'th and 1st partitions will be consumed from
- Subscribe string
- A comma-separated list of Kafka topics to read from. For example, 'topicA,topicB,topicC'
- Subscribe
Pattern string - A regular expression matching topics to subscribe to. For example, 'topic.*' will subscribe to all topics starting with 'topic'
- assign String
- A JSON string that contains the specific topic-partitions to consume from. For example, for '{"topicA":[0,1],"topicB":[2,4]}', topicA's 0'th and 1st partitions will be consumed from
- subscribe String
- A comma-separated list of Kafka topics to read from. For example, 'topicA,topicB,topicC'
- subscribe
Pattern String - A regular expression matching topics to subscribe to. For example, 'topic.*' will subscribe to all topics starting with 'topic'
- assign string
- A JSON string that contains the specific topic-partitions to consume from. For example, for '{"topicA":[0,1],"topicB":[2,4]}', topicA's 0'th and 1st partitions will be consumed from
- subscribe string
- A comma-separated list of Kafka topics to read from. For example, 'topicA,topicB,topicC'
- subscribe
Pattern string - A regular expression matching topics to subscribe to. For example, 'topic.*' will subscribe to all topics starting with 'topic'
- assign str
- A JSON string that contains the specific topic-partitions to consume from. For example, for '{"topicA":[0,1],"topicB":[2,4]}', topicA's 0'th and 1st partitions will be consumed from
- subscribe str
- A comma-separated list of Kafka topics to read from. For example, 'topicA,topicB,topicC'
- subscribe_
pattern str - A regular expression matching topics to subscribe to. For example, 'topic.*' will subscribe to all topics starting with 'topic'
- assign String
- A JSON string that contains the specific topic-partitions to consume from. For example, for '{"topicA":[0,1],"topicB":[2,4]}', topicA's 0'th and 1st partitions will be consumed from
- subscribe String
- A comma-separated list of Kafka topics to read from. For example, 'topicA,topicB,topicC'
- subscribe
Pattern String - A regular expression matching topics to subscribe to. For example, 'topic.*' will subscribe to all topics starting with 'topic'
FeatureEngineeringKafkaConfigValueSchema, FeatureEngineeringKafkaConfigValueSchemaArgs
- Json
Schema string - Schema of the JSON object in standard IETF JSON schema format (https://json-schema.org/)
- Json
Schema string - Schema of the JSON object in standard IETF JSON schema format (https://json-schema.org/)
- json
Schema String - Schema of the JSON object in standard IETF JSON schema format (https://json-schema.org/)
- json
Schema string - Schema of the JSON object in standard IETF JSON schema format (https://json-schema.org/)
- json_
schema str - Schema of the JSON object in standard IETF JSON schema format (https://json-schema.org/)
- json
Schema String - Schema of the JSON object in standard IETF JSON schema format (https://json-schema.org/)
Import
As of Pulumi v1.5, resources can be imported through configuration.
hcl
import {
id = "name"
to = databricks_feature_engineering_kafka_config.this
}
If you are using an older version of Pulumi, import the resource using the pulumi import command as follows:
$ pulumi import databricks:index/featureEngineeringKafkaConfig:FeatureEngineeringKafkaConfig this "name"
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- databricks pulumi/pulumi-databricks
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the
databricks Terraform Provider.
