mongodbatlas.StreamConnection describes a stream connection.
Example Usage
import * as pulumi from "@pulumi/pulumi";
import * as mongodbatlas from "@pulumi/mongodbatlas";
const example = mongodbatlas.getStreamConnection({
projectId: "<PROJECT_ID>",
workspaceName: "<WORKSPACE_NAME>",
connectionName: "<CONNECTION_NAME>",
});
import pulumi
import pulumi_mongodbatlas as mongodbatlas
example = mongodbatlas.get_stream_connection(project_id="<PROJECT_ID>",
    workspace_name="<WORKSPACE_NAME>",
    connection_name="<CONNECTION_NAME>")
package main
import (
	"github.com/pulumi/pulumi-mongodbatlas/sdk/v4/go/mongodbatlas"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := mongodbatlas.LookupStreamConnection(ctx, &mongodbatlas.LookupStreamConnectionArgs{
			ProjectId:      "<PROJECT_ID>",
			WorkspaceName:  pulumi.StringRef("<WORKSPACE_NAME>"),
			ConnectionName: "<CONNECTION_NAME>",
		}, nil)
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Mongodbatlas = Pulumi.Mongodbatlas;
return await Deployment.RunAsync(() =>
{
    var example = Mongodbatlas.GetStreamConnection.Invoke(new()
    {
        ProjectId = "<PROJECT_ID>",
        WorkspaceName = "<WORKSPACE_NAME>",
        ConnectionName = "<CONNECTION_NAME>",
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.mongodbatlas.MongodbatlasFunctions;
import com.pulumi.mongodbatlas.inputs.GetStreamConnectionArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        final var example = MongodbatlasFunctions.getStreamConnection(GetStreamConnectionArgs.builder()
            .projectId("<PROJECT_ID>")
            .workspaceName("<WORKSPACE_NAME>")
            .connectionName("<CONNECTION_NAME>")
            .build());
    }
}
variables:
  example:
    fn::invoke:
      function: mongodbatlas:getStreamConnection
      arguments:
        projectId: <PROJECT_ID>
        workspaceName: <WORKSPACE_NAME>
        connectionName: <CONNECTION_NAME>
Using getStreamConnection
Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.
function getStreamConnection(args: GetStreamConnectionArgs, opts?: InvokeOptions): Promise<GetStreamConnectionResult>
function getStreamConnectionOutput(args: GetStreamConnectionOutputArgs, opts?: InvokeOptions): Output<GetStreamConnectionResult>
def get_stream_connection(connection_name: Optional[str] = None,
                          instance_name: Optional[str] = None,
                          project_id: Optional[str] = None,
                          workspace_name: Optional[str] = None,
                          opts: Optional[InvokeOptions] = None) -> GetStreamConnectionResult
def get_stream_connection_output(connection_name: Optional[pulumi.Input[str]] = None,
                                 instance_name: Optional[pulumi.Input[str]] = None,
                                 project_id: Optional[pulumi.Input[str]] = None,
                                 workspace_name: Optional[pulumi.Input[str]] = None,
                                 opts: Optional[InvokeOptions] = None) -> Output[GetStreamConnectionResult]
func LookupStreamConnection(ctx *Context, args *LookupStreamConnectionArgs, opts ...InvokeOption) (*LookupStreamConnectionResult, error)
func LookupStreamConnectionOutput(ctx *Context, args *LookupStreamConnectionOutputArgs, opts ...InvokeOption) LookupStreamConnectionResultOutput

> Note: This function is named LookupStreamConnection in the Go SDK.

public static class GetStreamConnection
{
    public static Task<GetStreamConnectionResult> InvokeAsync(GetStreamConnectionArgs args, InvokeOptions? opts = null)
    public static Output<GetStreamConnectionResult> Invoke(GetStreamConnectionInvokeArgs args, InvokeOptions? opts = null)
}

public static CompletableFuture<GetStreamConnectionResult> getStreamConnection(GetStreamConnectionArgs args, InvokeOptions options)
public static Output<GetStreamConnectionResult> getStreamConnection(GetStreamConnectionArgs args, InvokeOptions options)

fn::invoke:
  function: mongodbatlas:index/getStreamConnection:getStreamConnection
  arguments:
    # arguments dictionary
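For example, a minimal TypeScript sketch of the output form, where the lookup arguments come from stack configuration (the config keys here are illustrative assumptions, not part of this package):

import * as pulumi from "@pulumi/pulumi";
import * as mongodbatlas from "@pulumi/mongodbatlas";

const config = new pulumi.Config();

// Output form: accepts Input-wrapped arguments and returns an Output-wrapped result.
const connection = mongodbatlas.getStreamConnectionOutput({
    projectId: config.require("projectId"),           // assumed config key
    workspaceName: config.require("workspaceName"),   // assumed config key
    connectionName: config.require("connectionName"), // assumed config key
});

// Unwrap individual fields with apply once the value is known.
export const connectionType = connection.apply(c => c.type);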
The following arguments are supported:

- ConnectionName string - Label that identifies the stream connection. In the case of the Sample type, this is the name of the sample source. NOTE: Either workspace_name or instance_name must be provided, but not both. These fields are functionally identical and workspace_name is an alias for instance_name. workspace_name should be used instead of instance_name.
- ProjectId string - Unique 24-hexadecimal digit string that identifies your project.
- InstanceName string - Label that identifies the stream processing workspace. Attribute is deprecated and will be removed in following major versions in favor of workspace_name.
- WorkspaceName string - Label that identifies the stream processing workspace. Conflicts with instance_name.

- ConnectionName string - Label that identifies the stream connection. In the case of the Sample type, this is the name of the sample source. NOTE: Either workspace_name or instance_name must be provided, but not both. These fields are functionally identical and workspace_name is an alias for instance_name. workspace_name should be used instead of instance_name.
- ProjectId string - Unique 24-hexadecimal digit string that identifies your project.
- InstanceName string - Label that identifies the stream processing workspace. Attribute is deprecated and will be removed in following major versions in favor of workspace_name.
- WorkspaceName string - Label that identifies the stream processing workspace. Conflicts with instance_name.

- connectionName String - Label that identifies the stream connection. In the case of the Sample type, this is the name of the sample source. NOTE: Either workspace_name or instance_name must be provided, but not both. These fields are functionally identical and workspace_name is an alias for instance_name. workspace_name should be used instead of instance_name.
- projectId String - Unique 24-hexadecimal digit string that identifies your project.
- instanceName String - Label that identifies the stream processing workspace. Attribute is deprecated and will be removed in following major versions in favor of workspace_name.
- workspaceName String - Label that identifies the stream processing workspace. Conflicts with instance_name.

- connectionName string - Label that identifies the stream connection. In the case of the Sample type, this is the name of the sample source. NOTE: Either workspace_name or instance_name must be provided, but not both. These fields are functionally identical and workspace_name is an alias for instance_name. workspace_name should be used instead of instance_name.
- projectId string - Unique 24-hexadecimal digit string that identifies your project.
- instanceName string - Label that identifies the stream processing workspace. Attribute is deprecated and will be removed in following major versions in favor of workspace_name.
- workspaceName string - Label that identifies the stream processing workspace. Conflicts with instance_name.

- connection_name str - Label that identifies the stream connection. In the case of the Sample type, this is the name of the sample source. NOTE: Either workspace_name or instance_name must be provided, but not both. These fields are functionally identical and workspace_name is an alias for instance_name. workspace_name should be used instead of instance_name.
- project_id str - Unique 24-hexadecimal digit string that identifies your project.
- instance_name str - Label that identifies the stream processing workspace. Attribute is deprecated and will be removed in following major versions in favor of workspace_name.
- workspace_name str - Label that identifies the stream processing workspace. Conflicts with instance_name.

- connectionName String - Label that identifies the stream connection. In the case of the Sample type, this is the name of the sample source. NOTE: Either workspace_name or instance_name must be provided, but not both. These fields are functionally identical and workspace_name is an alias for instance_name. workspace_name should be used instead of instance_name.
- projectId String - Unique 24-hexadecimal digit string that identifies your project.
- instanceName String - Label that identifies the stream processing workspace. Attribute is deprecated and will be removed in following major versions in favor of workspace_name.
- workspaceName String - Label that identifies the stream processing workspace. Conflicts with instance_name.
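As the note above says, workspace_name is preferred, but existing configurations that still pass the deprecated instance_name argument continue to work; a minimal TypeScript sketch (placeholders are illustrative):

import * as mongodbatlas from "@pulumi/mongodbatlas";

// instanceName is an alias for workspaceName; prefer workspaceName in new code.
const legacy = mongodbatlas.getStreamConnection({
    projectId: "<PROJECT_ID>",
    instanceName: "<INSTANCE_NAME>", // deprecated in favor of workspaceName
    connectionName: "<CONNECTION_NAME>",
});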
getStreamConnection Result
The following output properties are available:
- Authentication GetStreamConnectionAuthentication - User credentials required to connect to a Kafka cluster. Includes the authentication type, as well as the parameters for that authentication mode. See authentication.
- Aws GetStreamConnectionAws - The configuration for AWS Lambda connection. See AWS
- BootstrapServers string - Comma separated list of server addresses.
- ClusterName string - Name of the cluster configured for this connection.
- ClusterProjectId string - Unique 24-hexadecimal digit string that identifies the project that contains the configured cluster. Required if the ID does not match the project containing the streams instance. You must first enable the organization setting.
- Config Dictionary<string, string> - A map of Kafka key-value pairs for optional configuration. This is a flat object, and keys can have '.' characters.
- ConnectionName string
- DbRoleToExecute GetStreamConnectionDbRoleToExecute - The name of a Built in or Custom DB Role to connect to an Atlas Cluster. See DBRoleToExecute.
- Headers Dictionary<string, string> - A map of key-value pairs for optional headers.
- Id string
- Networking GetStreamConnectionNetworking - Networking Access Type can either be PUBLIC (default) or VPC. See networking.
- ProjectId string
- SchemaRegistryAuthentication GetStreamConnectionSchemaRegistryAuthentication - Authentication configuration for Schema Registry. See Schema Registry Authentication.
- SchemaRegistryProvider string - The Schema Registry provider. Must be set to CONFLUENT.
- SchemaRegistryUrls List<string> - List of Schema Registry endpoint URLs used by this connection. Each URL must use the http or https scheme and specify a valid host and optional port.
- Security GetStreamConnectionSecurity - Properties for the secure transport connection to Kafka. For SASL_SSL, this can include the trusted certificate to use. See security.
- Type string - Authentication type discriminator. Specifies the authentication mechanism for Confluent Schema Registry. Valid values are USER_INFO or SASL_INHERIT. USER_INFO - Uses username and password authentication for Confluent Schema Registry. SASL_INHERIT - Inherits the authentication configuration from Kafka for the Confluent Schema Registry.
- Url string - URL of the HTTPs endpoint that will be used for creating a connection.
- InstanceName string
- WorkspaceName string

- Authentication GetStreamConnectionAuthentication - User credentials required to connect to a Kafka cluster. Includes the authentication type, as well as the parameters for that authentication mode. See authentication.
- Aws GetStreamConnectionAws - The configuration for AWS Lambda connection. See AWS
- BootstrapServers string - Comma separated list of server addresses.
- ClusterName string - Name of the cluster configured for this connection.
- ClusterProjectId string - Unique 24-hexadecimal digit string that identifies the project that contains the configured cluster. Required if the ID does not match the project containing the streams instance. You must first enable the organization setting.
- Config map[string]string - A map of Kafka key-value pairs for optional configuration. This is a flat object, and keys can have '.' characters.
- ConnectionName string
- DbRoleToExecute GetStreamConnectionDbRoleToExecute - The name of a Built in or Custom DB Role to connect to an Atlas Cluster. See DBRoleToExecute.
- Headers map[string]string - A map of key-value pairs for optional headers.
- Id string
- Networking GetStreamConnectionNetworking - Networking Access Type can either be PUBLIC (default) or VPC. See networking.
- ProjectId string
- SchemaRegistryAuthentication GetStreamConnectionSchemaRegistryAuthentication - Authentication configuration for Schema Registry. See Schema Registry Authentication.
- SchemaRegistryProvider string - The Schema Registry provider. Must be set to CONFLUENT.
- SchemaRegistryUrls []string - List of Schema Registry endpoint URLs used by this connection. Each URL must use the http or https scheme and specify a valid host and optional port.
- Security GetStreamConnectionSecurity - Properties for the secure transport connection to Kafka. For SASL_SSL, this can include the trusted certificate to use. See security.
- Type string - Authentication type discriminator. Specifies the authentication mechanism for Confluent Schema Registry. Valid values are USER_INFO or SASL_INHERIT. USER_INFO - Uses username and password authentication for Confluent Schema Registry. SASL_INHERIT - Inherits the authentication configuration from Kafka for the Confluent Schema Registry.
- Url string - URL of the HTTPs endpoint that will be used for creating a connection.
- InstanceName string
- WorkspaceName string

- authentication GetStreamConnectionAuthentication - User credentials required to connect to a Kafka cluster. Includes the authentication type, as well as the parameters for that authentication mode. See authentication.
- aws GetStreamConnectionAws - The configuration for AWS Lambda connection. See AWS
- bootstrapServers String - Comma separated list of server addresses.
- clusterName String - Name of the cluster configured for this connection.
- clusterProjectId String - Unique 24-hexadecimal digit string that identifies the project that contains the configured cluster. Required if the ID does not match the project containing the streams instance. You must first enable the organization setting.
- config Map<String,String> - A map of Kafka key-value pairs for optional configuration. This is a flat object, and keys can have '.' characters.
- connectionName String
- dbRoleToExecute GetStreamConnectionDbRoleToExecute - The name of a Built in or Custom DB Role to connect to an Atlas Cluster. See DBRoleToExecute.
- headers Map<String,String> - A map of key-value pairs for optional headers.
- id String
- networking GetStreamConnectionNetworking - Networking Access Type can either be PUBLIC (default) or VPC. See networking.
- projectId String
- schemaRegistryAuthentication GetStreamConnectionSchemaRegistryAuthentication - Authentication configuration for Schema Registry. See Schema Registry Authentication.
- schemaRegistryProvider String - The Schema Registry provider. Must be set to CONFLUENT.
- schemaRegistryUrls List<String> - List of Schema Registry endpoint URLs used by this connection. Each URL must use the http or https scheme and specify a valid host and optional port.
- security GetStreamConnectionSecurity - Properties for the secure transport connection to Kafka. For SASL_SSL, this can include the trusted certificate to use. See security.
- type String - Authentication type discriminator. Specifies the authentication mechanism for Confluent Schema Registry. Valid values are USER_INFO or SASL_INHERIT. USER_INFO - Uses username and password authentication for Confluent Schema Registry. SASL_INHERIT - Inherits the authentication configuration from Kafka for the Confluent Schema Registry.
- url String - URL of the HTTPs endpoint that will be used for creating a connection.
- instanceName String
- workspaceName String

- authentication GetStreamConnectionAuthentication - User credentials required to connect to a Kafka cluster. Includes the authentication type, as well as the parameters for that authentication mode. See authentication.
- aws GetStreamConnectionAws - The configuration for AWS Lambda connection. See AWS
- bootstrapServers string - Comma separated list of server addresses.
- clusterName string - Name of the cluster configured for this connection.
- clusterProjectId string - Unique 24-hexadecimal digit string that identifies the project that contains the configured cluster. Required if the ID does not match the project containing the streams instance. You must first enable the organization setting.
- config {[key: string]: string} - A map of Kafka key-value pairs for optional configuration. This is a flat object, and keys can have '.' characters.
- connectionName string
- dbRoleToExecute GetStreamConnectionDbRoleToExecute - The name of a Built in or Custom DB Role to connect to an Atlas Cluster. See DBRoleToExecute.
- headers {[key: string]: string} - A map of key-value pairs for optional headers.
- id string
- networking GetStreamConnectionNetworking - Networking Access Type can either be PUBLIC (default) or VPC. See networking.
- projectId string
- schemaRegistryAuthentication GetStreamConnectionSchemaRegistryAuthentication - Authentication configuration for Schema Registry. See Schema Registry Authentication.
- schemaRegistryProvider string - The Schema Registry provider. Must be set to CONFLUENT.
- schemaRegistryUrls string[] - List of Schema Registry endpoint URLs used by this connection. Each URL must use the http or https scheme and specify a valid host and optional port.
- security GetStreamConnectionSecurity - Properties for the secure transport connection to Kafka. For SASL_SSL, this can include the trusted certificate to use. See security.
- type string - Authentication type discriminator. Specifies the authentication mechanism for Confluent Schema Registry. Valid values are USER_INFO or SASL_INHERIT. USER_INFO - Uses username and password authentication for Confluent Schema Registry. SASL_INHERIT - Inherits the authentication configuration from Kafka for the Confluent Schema Registry.
- url string - URL of the HTTPs endpoint that will be used for creating a connection.
- instanceName string
- workspaceName string

- authentication GetStreamConnectionAuthentication - User credentials required to connect to a Kafka cluster. Includes the authentication type, as well as the parameters for that authentication mode. See authentication.
- aws GetStreamConnectionAws - The configuration for AWS Lambda connection. See AWS
- bootstrap_servers str - Comma separated list of server addresses.
- cluster_name str - Name of the cluster configured for this connection.
- cluster_project_id str - Unique 24-hexadecimal digit string that identifies the project that contains the configured cluster. Required if the ID does not match the project containing the streams instance. You must first enable the organization setting.
- config Mapping[str, str] - A map of Kafka key-value pairs for optional configuration. This is a flat object, and keys can have '.' characters.
- connection_name str
- db_role_to_execute GetStreamConnectionDbRoleToExecute - The name of a Built in or Custom DB Role to connect to an Atlas Cluster. See DBRoleToExecute.
- headers Mapping[str, str] - A map of key-value pairs for optional headers.
- id str
- networking GetStreamConnectionNetworking - Networking Access Type can either be PUBLIC (default) or VPC. See networking.
- project_id str
- schema_registry_authentication GetStreamConnectionSchemaRegistryAuthentication - Authentication configuration for Schema Registry. See Schema Registry Authentication.
- schema_registry_provider str - The Schema Registry provider. Must be set to CONFLUENT.
- schema_registry_urls Sequence[str] - List of Schema Registry endpoint URLs used by this connection. Each URL must use the http or https scheme and specify a valid host and optional port.
- security GetStreamConnectionSecurity - Properties for the secure transport connection to Kafka. For SASL_SSL, this can include the trusted certificate to use. See security.
- type str - Authentication type discriminator. Specifies the authentication mechanism for Confluent Schema Registry. Valid values are USER_INFO or SASL_INHERIT. USER_INFO - Uses username and password authentication for Confluent Schema Registry. SASL_INHERIT - Inherits the authentication configuration from Kafka for the Confluent Schema Registry.
- url str - URL of the HTTPs endpoint that will be used for creating a connection.
- instance_name str
- workspace_name str

- authentication Property Map - User credentials required to connect to a Kafka cluster. Includes the authentication type, as well as the parameters for that authentication mode. See authentication.
- aws Property Map - The configuration for AWS Lambda connection. See AWS
- bootstrapServers String - Comma separated list of server addresses.
- clusterName String - Name of the cluster configured for this connection.
- clusterProjectId String - Unique 24-hexadecimal digit string that identifies the project that contains the configured cluster. Required if the ID does not match the project containing the streams instance. You must first enable the organization setting.
- config Map<String> - A map of Kafka key-value pairs for optional configuration. This is a flat object, and keys can have '.' characters.
- connectionName String
- dbRoleToExecute Property Map - The name of a Built in or Custom DB Role to connect to an Atlas Cluster. See DBRoleToExecute.
- headers Map<String> - A map of key-value pairs for optional headers.
- id String
- networking Property Map - Networking Access Type can either be PUBLIC (default) or VPC. See networking.
- projectId String
- schemaRegistryAuthentication Property Map - Authentication configuration for Schema Registry. See Schema Registry Authentication.
- schemaRegistryProvider String - The Schema Registry provider. Must be set to CONFLUENT.
- schemaRegistryUrls List<String> - List of Schema Registry endpoint URLs used by this connection. Each URL must use the http or https scheme and specify a valid host and optional port.
- security Property Map - Properties for the secure transport connection to Kafka. For SASL_SSL, this can include the trusted certificate to use. See security.
- type String - Authentication type discriminator. Specifies the authentication mechanism for Confluent Schema Registry. Valid values are USER_INFO or SASL_INHERIT. USER_INFO - Uses username and password authentication for Confluent Schema Registry. SASL_INHERIT - Inherits the authentication configuration from Kafka for the Confluent Schema Registry.
- url String - URL of the HTTPs endpoint that will be used for creating a connection.
- instanceName String
- workspaceName String
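As a brief illustration of consuming these outputs, a TypeScript sketch that exports a few of the scalar properties listed above (assuming the lookup from the earlier example):

import * as mongodbatlas from "@pulumi/mongodbatlas";

const example = mongodbatlas.getStreamConnection({
    projectId: "<PROJECT_ID>",
    workspaceName: "<WORKSPACE_NAME>",
    connectionName: "<CONNECTION_NAME>",
});

// The direct form returns a Promise, so individual fields are read with then().
export const connectionType = example.then(c => c.type);
export const clusterName = example.then(c => c.clusterName);
export const bootstrapServers = example.then(c => c.bootstrapServers);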
Supporting Types
GetStreamConnectionAuthentication
- ClientId string - Public identifier for the Kafka client.
- ClientSecret string - Secret known only to the Kafka client and the authorization server.
- Mechanism string - Method of authentication. Value can be PLAIN, SCRAM-256, SCRAM-512, or OAUTHBEARER.
- Method string - SASL OAUTHBEARER authentication method. Value must be OIDC.
- Password string - Password for the Schema Registry. Required when type is USER_INFO.
- SaslOauthbearerExtensions string - Additional information to provide to the Kafka broker.
- Scope string - Scope of the access request to the broker specified by the Kafka clients.
- TokenEndpointUrl string - OAUTH issuer (IdP provider) token endpoint HTTP(S) URI used to retrieve the token.
- Username string - Username for the Schema Registry. Required when type is USER_INFO.

- ClientId string - Public identifier for the Kafka client.
- ClientSecret string - Secret known only to the Kafka client and the authorization server.
- Mechanism string - Method of authentication. Value can be PLAIN, SCRAM-256, SCRAM-512, or OAUTHBEARER.
- Method string - SASL OAUTHBEARER authentication method. Value must be OIDC.
- Password string - Password for the Schema Registry. Required when type is USER_INFO.
- SaslOauthbearerExtensions string - Additional information to provide to the Kafka broker.
- Scope string - Scope of the access request to the broker specified by the Kafka clients.
- TokenEndpointUrl string - OAUTH issuer (IdP provider) token endpoint HTTP(S) URI used to retrieve the token.
- Username string - Username for the Schema Registry. Required when type is USER_INFO.

- clientId String - Public identifier for the Kafka client.
- clientSecret String - Secret known only to the Kafka client and the authorization server.
- mechanism String - Method of authentication. Value can be PLAIN, SCRAM-256, SCRAM-512, or OAUTHBEARER.
- method String - SASL OAUTHBEARER authentication method. Value must be OIDC.
- password String - Password for the Schema Registry. Required when type is USER_INFO.
- saslOauthbearerExtensions String - Additional information to provide to the Kafka broker.
- scope String - Scope of the access request to the broker specified by the Kafka clients.
- tokenEndpointUrl String - OAUTH issuer (IdP provider) token endpoint HTTP(S) URI used to retrieve the token.
- username String - Username for the Schema Registry. Required when type is USER_INFO.

- clientId string - Public identifier for the Kafka client.
- clientSecret string - Secret known only to the Kafka client and the authorization server.
- mechanism string - Method of authentication. Value can be PLAIN, SCRAM-256, SCRAM-512, or OAUTHBEARER.
- method string - SASL OAUTHBEARER authentication method. Value must be OIDC.
- password string - Password for the Schema Registry. Required when type is USER_INFO.
- saslOauthbearerExtensions string - Additional information to provide to the Kafka broker.
- scope string - Scope of the access request to the broker specified by the Kafka clients.
- tokenEndpointUrl string - OAUTH issuer (IdP provider) token endpoint HTTP(S) URI used to retrieve the token.
- username string - Username for the Schema Registry. Required when type is USER_INFO.

- client_id str - Public identifier for the Kafka client.
- client_secret str - Secret known only to the Kafka client and the authorization server.
- mechanism str - Method of authentication. Value can be PLAIN, SCRAM-256, SCRAM-512, or OAUTHBEARER.
- method str - SASL OAUTHBEARER authentication method. Value must be OIDC.
- password str - Password for the Schema Registry. Required when type is USER_INFO.
- sasl_oauthbearer_extensions str - Additional information to provide to the Kafka broker.
- scope str - Scope of the access request to the broker specified by the Kafka clients.
- token_endpoint_url str - OAUTH issuer (IdP provider) token endpoint HTTP(S) URI used to retrieve the token.
- username str - Username for the Schema Registry. Required when type is USER_INFO.

- clientId String - Public identifier for the Kafka client.
- clientSecret String - Secret known only to the Kafka client and the authorization server.
- mechanism String - Method of authentication. Value can be PLAIN, SCRAM-256, SCRAM-512, or OAUTHBEARER.
- method String - SASL OAUTHBEARER authentication method. Value must be OIDC.
- password String - Password for the Schema Registry. Required when type is USER_INFO.
- saslOauthbearerExtensions String - Additional information to provide to the Kafka broker.
- scope String - Scope of the access request to the broker specified by the Kafka clients.
- tokenEndpointUrl String - OAUTH issuer (IdP provider) token endpoint HTTP(S) URI used to retrieve the token.
- username String - Username for the Schema Registry. Required when type is USER_INFO.
GetStreamConnectionAws
- RoleArn string - Amazon Resource Name (ARN) that identifies the Amazon Web Services (AWS) Identity and Access Management (IAM) role that MongoDB Cloud assumes when it accesses resources in your AWS account.

- RoleArn string - Amazon Resource Name (ARN) that identifies the Amazon Web Services (AWS) Identity and Access Management (IAM) role that MongoDB Cloud assumes when it accesses resources in your AWS account.

- roleArn String - Amazon Resource Name (ARN) that identifies the Amazon Web Services (AWS) Identity and Access Management (IAM) role that MongoDB Cloud assumes when it accesses resources in your AWS account.

- roleArn string - Amazon Resource Name (ARN) that identifies the Amazon Web Services (AWS) Identity and Access Management (IAM) role that MongoDB Cloud assumes when it accesses resources in your AWS account.

- role_arn str - Amazon Resource Name (ARN) that identifies the Amazon Web Services (AWS) Identity and Access Management (IAM) role that MongoDB Cloud assumes when it accesses resources in your AWS account.

- roleArn String - Amazon Resource Name (ARN) that identifies the Amazon Web Services (AWS) Identity and Access Management (IAM) role that MongoDB Cloud assumes when it accesses resources in your AWS account.
GetStreamConnectionDbRoleToExecute
- Role string - The name of the role to use. Can be a built in role or a custom role.
- Type string - Authentication type discriminator. Specifies the authentication mechanism for Confluent Schema Registry. Valid values are USER_INFO or SASL_INHERIT. USER_INFO - Uses username and password authentication for Confluent Schema Registry. SASL_INHERIT - Inherits the authentication configuration from Kafka for the Confluent Schema Registry.

- Role string - The name of the role to use. Can be a built in role or a custom role.
- Type string - Authentication type discriminator. Specifies the authentication mechanism for Confluent Schema Registry. Valid values are USER_INFO or SASL_INHERIT. USER_INFO - Uses username and password authentication for Confluent Schema Registry. SASL_INHERIT - Inherits the authentication configuration from Kafka for the Confluent Schema Registry.

- role String - The name of the role to use. Can be a built in role or a custom role.
- type String - Authentication type discriminator. Specifies the authentication mechanism for Confluent Schema Registry. Valid values are USER_INFO or SASL_INHERIT. USER_INFO - Uses username and password authentication for Confluent Schema Registry. SASL_INHERIT - Inherits the authentication configuration from Kafka for the Confluent Schema Registry.

- role string - The name of the role to use. Can be a built in role or a custom role.
- type string - Authentication type discriminator. Specifies the authentication mechanism for Confluent Schema Registry. Valid values are USER_INFO or SASL_INHERIT. USER_INFO - Uses username and password authentication for Confluent Schema Registry. SASL_INHERIT - Inherits the authentication configuration from Kafka for the Confluent Schema Registry.

- role str - The name of the role to use. Can be a built in role or a custom role.
- type str - Authentication type discriminator. Specifies the authentication mechanism for Confluent Schema Registry. Valid values are USER_INFO or SASL_INHERIT. USER_INFO - Uses username and password authentication for Confluent Schema Registry. SASL_INHERIT - Inherits the authentication configuration from Kafka for the Confluent Schema Registry.

- role String - The name of the role to use. Can be a built in role or a custom role.
- type String - Authentication type discriminator. Specifies the authentication mechanism for Confluent Schema Registry. Valid values are USER_INFO or SASL_INHERIT. USER_INFO - Uses username and password authentication for Confluent Schema Registry. SASL_INHERIT - Inherits the authentication configuration from Kafka for the Confluent Schema Registry.
GetStreamConnectionNetworking
- Access GetStreamConnectionNetworkingAccess - Information about the networking access. See access.

- Access GetStreamConnectionNetworkingAccess - Information about the networking access. See access.

- access GetStreamConnectionNetworkingAccess - Information about the networking access. See access.

- access GetStreamConnectionNetworkingAccess - Information about the networking access. See access.

- access GetStreamConnectionNetworkingAccess - Information about the networking access. See access.

- access Property Map - Information about the networking access. See access.
GetStreamConnectionNetworkingAccess
- ConnectionId string - Id of the Private Link connection when type is PRIVATE_LINK.
- Type string - Authentication type discriminator. Specifies the authentication mechanism for Confluent Schema Registry. Valid values are USER_INFO or SASL_INHERIT. USER_INFO - Uses username and password authentication for Confluent Schema Registry. SASL_INHERIT - Inherits the authentication configuration from Kafka for the Confluent Schema Registry.

- ConnectionId string - Id of the Private Link connection when type is PRIVATE_LINK.
- Type string - Authentication type discriminator. Specifies the authentication mechanism for Confluent Schema Registry. Valid values are USER_INFO or SASL_INHERIT. USER_INFO - Uses username and password authentication for Confluent Schema Registry. SASL_INHERIT - Inherits the authentication configuration from Kafka for the Confluent Schema Registry.

- connectionId String - Id of the Private Link connection when type is PRIVATE_LINK.
- type String - Authentication type discriminator. Specifies the authentication mechanism for Confluent Schema Registry. Valid values are USER_INFO or SASL_INHERIT. USER_INFO - Uses username and password authentication for Confluent Schema Registry. SASL_INHERIT - Inherits the authentication configuration from Kafka for the Confluent Schema Registry.

- connectionId string - Id of the Private Link connection when type is PRIVATE_LINK.
- type string - Authentication type discriminator. Specifies the authentication mechanism for Confluent Schema Registry. Valid values are USER_INFO or SASL_INHERIT. USER_INFO - Uses username and password authentication for Confluent Schema Registry. SASL_INHERIT - Inherits the authentication configuration from Kafka for the Confluent Schema Registry.

- connection_id str - Id of the Private Link connection when type is PRIVATE_LINK.
- type str - Authentication type discriminator. Specifies the authentication mechanism for Confluent Schema Registry. Valid values are USER_INFO or SASL_INHERIT. USER_INFO - Uses username and password authentication for Confluent Schema Registry. SASL_INHERIT - Inherits the authentication configuration from Kafka for the Confluent Schema Registry.

- connectionId String - Id of the Private Link connection when type is PRIVATE_LINK.
- type String - Authentication type discriminator. Specifies the authentication mechanism for Confluent Schema Registry. Valid values are USER_INFO or SASL_INHERIT. USER_INFO - Uses username and password authentication for Confluent Schema Registry. SASL_INHERIT - Inherits the authentication configuration from Kafka for the Confluent Schema Registry.
GetStreamConnectionSchemaRegistryAuthentication
- Password string - Password for the Schema Registry. Required when type is USER_INFO.
- Type string - Authentication type discriminator. Specifies the authentication mechanism for Confluent Schema Registry. Valid values are USER_INFO or SASL_INHERIT. USER_INFO - Uses username and password authentication for Confluent Schema Registry. SASL_INHERIT - Inherits the authentication configuration from Kafka for the Confluent Schema Registry.
- Username string - Username for the Schema Registry. Required when type is USER_INFO.

- Password string - Password for the Schema Registry. Required when type is USER_INFO.
- Type string - Authentication type discriminator. Specifies the authentication mechanism for Confluent Schema Registry. Valid values are USER_INFO or SASL_INHERIT. USER_INFO - Uses username and password authentication for Confluent Schema Registry. SASL_INHERIT - Inherits the authentication configuration from Kafka for the Confluent Schema Registry.
- Username string - Username for the Schema Registry. Required when type is USER_INFO.

- password String - Password for the Schema Registry. Required when type is USER_INFO.
- type String - Authentication type discriminator. Specifies the authentication mechanism for Confluent Schema Registry. Valid values are USER_INFO or SASL_INHERIT. USER_INFO - Uses username and password authentication for Confluent Schema Registry. SASL_INHERIT - Inherits the authentication configuration from Kafka for the Confluent Schema Registry.
- username String - Username for the Schema Registry. Required when type is USER_INFO.

- password string - Password for the Schema Registry. Required when type is USER_INFO.
- type string - Authentication type discriminator. Specifies the authentication mechanism for Confluent Schema Registry. Valid values are USER_INFO or SASL_INHERIT. USER_INFO - Uses username and password authentication for Confluent Schema Registry. SASL_INHERIT - Inherits the authentication configuration from Kafka for the Confluent Schema Registry.
- username string - Username for the Schema Registry. Required when type is USER_INFO.

- password str - Password for the Schema Registry. Required when type is USER_INFO.
- type str - Authentication type discriminator. Specifies the authentication mechanism for Confluent Schema Registry. Valid values are USER_INFO or SASL_INHERIT. USER_INFO - Uses username and password authentication for Confluent Schema Registry. SASL_INHERIT - Inherits the authentication configuration from Kafka for the Confluent Schema Registry.
- username str - Username for the Schema Registry. Required when type is USER_INFO.

- password String - Password for the Schema Registry. Required when type is USER_INFO.
- type String - Authentication type discriminator. Specifies the authentication mechanism for Confluent Schema Registry. Valid values are USER_INFO or SASL_INHERIT. USER_INFO - Uses username and password authentication for Confluent Schema Registry. SASL_INHERIT - Inherits the authentication configuration from Kafka for the Confluent Schema Registry.
- username String - Username for the Schema Registry. Required when type is USER_INFO.
GetStreamConnectionSecurity
- BrokerPublicCertificate string - A trusted, public x509 certificate for connecting to Kafka over SSL. String value of the certificate must be defined in the attribute.
- Protocol string - Describes the transport type. Can be either SASL_PLAINTEXT or SASL_SSL.

- BrokerPublicCertificate string - A trusted, public x509 certificate for connecting to Kafka over SSL. String value of the certificate must be defined in the attribute.
- Protocol string - Describes the transport type. Can be either SASL_PLAINTEXT or SASL_SSL.

- brokerPublicCertificate String - A trusted, public x509 certificate for connecting to Kafka over SSL. String value of the certificate must be defined in the attribute.
- protocol String - Describes the transport type. Can be either SASL_PLAINTEXT or SASL_SSL.

- brokerPublicCertificate string - A trusted, public x509 certificate for connecting to Kafka over SSL. String value of the certificate must be defined in the attribute.
- protocol string - Describes the transport type. Can be either SASL_PLAINTEXT or SASL_SSL.

- broker_public_certificate str - A trusted, public x509 certificate for connecting to Kafka over SSL. String value of the certificate must be defined in the attribute.
- protocol str - Describes the transport type. Can be either SASL_PLAINTEXT or SASL_SSL.

- brokerPublicCertificate String - A trusted, public x509 certificate for connecting to Kafka over SSL. String value of the certificate must be defined in the attribute.
- protocol String - Describes the transport type. Can be either SASL_PLAINTEXT or SASL_SSL.
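The nested objects documented above are ordinary fields on the result, so they can be read the same way; a hedged TypeScript sketch that inspects the Kafka security and networking settings (these fields may be empty for connection types that do not use them):

import * as mongodbatlas from "@pulumi/mongodbatlas";

const example = mongodbatlas.getStreamConnection({
    projectId: "<PROJECT_ID>",
    workspaceName: "<WORKSPACE_NAME>",
    connectionName: "<CONNECTION_NAME>",
});

// security.protocol is SASL_PLAINTEXT or SASL_SSL; networking.access.type reports the access type (e.g. PUBLIC or VPC).
export const kafkaProtocol = example.then(c => c.security?.protocol);
export const networkingAccessType = example.then(c => c.networking?.access?.type);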
Package Details
- Repository: MongoDB Atlas pulumi/pulumi-mongodbatlas
- License: Apache-2.0
- Notes: This Pulumi package is based on the mongodbatlas Terraform Provider.
