databricks logo
Databricks v1.14.0, May 23 23

databricks.Secret

Explore with Pulumi AI

With this resource you can insert a secret under the provided scope with the given name. If a secret already exists with the same name, this command overwrites the existing secret’s value. The server encrypts the secret using the secret scope’s encryption settings before storing it. You must have WRITE or MANAGE permission on the secret scope. The secret key must consist of alphanumeric characters, dashes, underscores, and periods, and cannot exceed 128 characters. The maximum allowed secret value size is 128 KB. The maximum number of secrets in a given scope is 1000. You can read a secret value only from within a command on a cluster (for example, through a notebook); there is no API to read a secret value outside of a cluster. The permission applied is based on who is invoking the command and you must have at least READ permission. Please consult Secrets User Guide for more details.

The following resources are often used in the same context: databricks.SecretScope, databricks.SecretAcl, and the end-to-end workspace management guide.

Example Usage

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;

return await Deployment.RunAsync(() => 
{
    // Secret scope that will hold the secret.
    var app = new Databricks.SecretScope("app");

    // Read the sensitive value from stack configuration
    // (`pulumi config set --secret publishingApiValue ...`) instead of the
    // unresolved Terraform-style `data.azurerm_key_vault_secret` reference
    // left over from docs generation, which does not compile.
    var config = new Config();

    var publishingApi = new Databricks.Secret("publishingApi", new()
    {
        Key = "publishing_api",
        StringValue = config.RequireSecret("publishingApiValue"),
        Scope = app.Id,
    });

    // Use the secret *reference* ({{secrets/scope/key}}) in Spark conf,
    // never the plaintext value.
    var @this = new Databricks.Cluster("this", new()
    {
        SparkConf = 
        {
            { "fs.azure.account.oauth2.client.secret", publishingApi.ConfigReference },
        },
    });

});
package main

import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		app, err := databricks.NewSecretScope(ctx, "app", nil)
		if err != nil {
			return err
		}
		publishingApi, err := databricks.NewSecret(ctx, "publishingApi", &databricks.SecretArgs{
			Key:         pulumi.String("publishing_api"),
			StringValue: pulumi.Any(data.Azurerm_key_vault_secret.Example.Value),
			Scope:       app.ID(),
		})
		if err != nil {
			return err
		}
		_, err = databricks.NewCluster(ctx, "this", &databricks.ClusterArgs{
			SparkConf: pulumi.AnyMap{
				"fs.azure.account.oauth2.client.secret": publishingApi.ConfigReference,
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.SecretScope;
import com.pulumi.databricks.Secret;
import com.pulumi.databricks.SecretArgs;
import com.pulumi.databricks.Cluster;
import com.pulumi.databricks.ClusterArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        // Secret scope that will hold the secret.
        var app = new SecretScope("app");

        // Read the sensitive value from stack configuration
        // (`pulumi config set --secret publishingApiValue ...`) instead of the
        // unresolved Terraform-style data.azurerm_key_vault_secret reference
        // left over from docs generation, which does not compile.
        var publishingApi = new Secret("publishingApi", SecretArgs.builder()
            .key("publishing_api")
            .stringValue(ctx.config().requireSecret("publishingApiValue"))
            .scope(app.id())
            .build());

        // Use the secret *reference* ({{secrets/scope/key}}) in Spark conf,
        // never the plaintext value.
        var this_ = new Cluster("this", ClusterArgs.builder()
            .sparkConf(Map.of("fs.azure.account.oauth2.client.secret", publishingApi.configReference()))
            .build());

    }
}
import pulumi
import pulumi_databricks as databricks

# Secret scope that will hold the secret.
app = databricks.SecretScope("app")

# Read the sensitive value from stack configuration
# (`pulumi config set --secret publishing_api_value ...`) instead of the
# unresolved Terraform-style data["azurerm_key_vault_secret"] reference
# left over from docs generation, which raises NameError.
config = pulumi.Config()
publishing_api = databricks.Secret(
    "publishingApi",
    key="publishing_api",
    string_value=config.require_secret("publishing_api_value"),
    scope=app.id,
)

# Use the secret *reference* ({{secrets/scope/key}}) in Spark conf,
# never the plaintext value.
this = databricks.Cluster(
    "this",
    spark_conf={
        "fs.azure.account.oauth2.client.secret": publishing_api.config_reference,
    },
)
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

// Secret scope that will hold the secret.
const app = new databricks.SecretScope("app", {});

// Read the sensitive value from stack configuration
// (`pulumi config set --secret publishingApiValue ...`) instead of the
// unresolved Terraform-style `data.azurerm_key_vault_secret` reference
// left over from docs generation, which does not compile.
const config = new pulumi.Config();
const publishingApi = new databricks.Secret("publishingApi", {
    key: "publishing_api",
    stringValue: config.requireSecret("publishingApiValue"),
    scope: app.id,
});

// Use the secret *reference* ({{secrets/scope/key}}) in Spark conf,
// never the plaintext value.
const _this = new databricks.Cluster("this", {sparkConf: {
    "fs.azure.account.oauth2.client.secret": publishingApi.configReference,
}});
configuration:
  # Sensitive value supplied via `pulumi config set --secret publishingApiValue ...`,
  # replacing the unresolved Terraform-style ${data.azurerm_key_vault_secret...}
  # reference left over from docs generation.
  publishingApiValue:
    type: String
resources:
  app:
    type: databricks:SecretScope
  publishingApi:
    type: databricks:Secret
    properties:
      key: publishing_api
      stringValue: ${publishingApiValue}
      scope: ${app.id}
  this:
    type: databricks:Cluster
    properties:
      # ...
      # Use the secret *reference* ({{secrets/scope/key}}) in Spark conf,
      # never the plaintext value.
      sparkConf:
        fs.azure.account.oauth2.client.secret: ${publishingApi.configReference}

Create Secret Resource

new Secret(name: string, args: SecretArgs, opts?: CustomResourceOptions);
@overload
def Secret(resource_name: str,
           opts: Optional[ResourceOptions] = None,
           key: Optional[str] = None,
           scope: Optional[str] = None,
           string_value: Optional[str] = None)
@overload
def Secret(resource_name: str,
           args: SecretArgs,
           opts: Optional[ResourceOptions] = None)
func NewSecret(ctx *Context, name string, args SecretArgs, opts ...ResourceOption) (*Secret, error)
public Secret(string name, SecretArgs args, CustomResourceOptions? opts = null)
public Secret(String name, SecretArgs args)
public Secret(String name, SecretArgs args, CustomResourceOptions options)
type: databricks:Secret
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

name string
The unique name of the resource.
args SecretArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name str
The unique name of the resource.
args SecretArgs
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.
ctx Context
Context object for the current deployment.
name string
The unique name of the resource.
args SecretArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name string
The unique name of the resource.
args SecretArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
name String
The unique name of the resource.
args SecretArgs
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.

Secret Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

The Secret resource accepts the following input properties:

Key string

(String) key within secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.

Scope string

(String) name of databricks secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.

StringValue string

(String) super secret sensitive value.

Key string

(String) key within secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.

Scope string

(String) name of databricks secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.

StringValue string

(String) super secret sensitive value.

key String

(String) key within secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.

scope String

(String) name of databricks secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.

stringValue String

(String) super secret sensitive value.

key string

(String) key within secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.

scope string

(String) name of databricks secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.

stringValue string

(String) super secret sensitive value.

key str

(String) key within secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.

scope str

(String) name of databricks secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.

string_value str

(String) super secret sensitive value.

key String

(String) key within secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.

scope String

(String) name of databricks secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.

stringValue String

(String) super secret sensitive value.

Outputs

All input properties are implicitly available as output properties. Additionally, the Secret resource produces the following output properties:

ConfigReference string

(String) value to use as a secret reference in Spark configuration and environment variables: {{secrets/scope/key}}.

Id string

The provider-assigned unique ID for this managed resource.

LastUpdatedTimestamp int

(Integer) time secret was updated

ConfigReference string

(String) value to use as a secret reference in Spark configuration and environment variables: {{secrets/scope/key}}.

Id string

The provider-assigned unique ID for this managed resource.

LastUpdatedTimestamp int

(Integer) time secret was updated

configReference String

(String) value to use as a secret reference in Spark configuration and environment variables: {{secrets/scope/key}}.

id String

The provider-assigned unique ID for this managed resource.

lastUpdatedTimestamp Integer

(Integer) time secret was updated

configReference string

(String) value to use as a secret reference in Spark configuration and environment variables: {{secrets/scope/key}}.

id string

The provider-assigned unique ID for this managed resource.

lastUpdatedTimestamp number

(Integer) time secret was updated

config_reference str

(String) value to use as a secret reference in Spark configuration and environment variables: {{secrets/scope/key}}.

id str

The provider-assigned unique ID for this managed resource.

last_updated_timestamp int

(Integer) time secret was updated

configReference String

(String) value to use as a secret reference in Spark configuration and environment variables: {{secrets/scope/key}}.

id String

The provider-assigned unique ID for this managed resource.

lastUpdatedTimestamp Number

(Integer) time secret was updated

Look up Existing Secret Resource

Get an existing Secret resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: SecretState, opts?: CustomResourceOptions): Secret
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        config_reference: Optional[str] = None,
        key: Optional[str] = None,
        last_updated_timestamp: Optional[int] = None,
        scope: Optional[str] = None,
        string_value: Optional[str] = None) -> Secret
func GetSecret(ctx *Context, name string, id IDInput, state *SecretState, opts ...ResourceOption) (*Secret, error)
public static Secret Get(string name, Input<string> id, SecretState? state, CustomResourceOptions? opts = null)
public static Secret get(String name, Output<String> id, SecretState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
The following state arguments are supported:
ConfigReference string

(String) value to use as a secret reference in Spark configuration and environment variables: {{secrets/scope/key}}.

Key string

(String) key within secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.

LastUpdatedTimestamp int

(Integer) time secret was updated

Scope string

(String) name of databricks secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.

StringValue string

(String) super secret sensitive value.

ConfigReference string

(String) value to use as a secret reference in Spark configuration and environment variables: {{secrets/scope/key}}.

Key string

(String) key within secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.

LastUpdatedTimestamp int

(Integer) time secret was updated

Scope string

(String) name of databricks secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.

StringValue string

(String) super secret sensitive value.

configReference String

(String) value to use as a secret reference in Spark configuration and environment variables: {{secrets/scope/key}}.

key String

(String) key within secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.

lastUpdatedTimestamp Integer

(Integer) time secret was updated

scope String

(String) name of databricks secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.

stringValue String

(String) super secret sensitive value.

configReference string

(String) value to use as a secret reference in Spark configuration and environment variables: {{secrets/scope/key}}.

key string

(String) key within secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.

lastUpdatedTimestamp number

(Integer) time secret was updated

scope string

(String) name of databricks secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.

stringValue string

(String) super secret sensitive value.

config_reference str

(String) value to use as a secret reference in Spark configuration and environment variables: {{secrets/scope/key}}.

key str

(String) key within secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.

last_updated_timestamp int

(Integer) time secret was updated

scope str

(String) name of databricks secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.

string_value str

(String) super secret sensitive value.

configReference String

(String) value to use as a secret reference in Spark configuration and environment variables: {{secrets/scope/key}}.

key String

(String) key within secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.

lastUpdatedTimestamp Number

(Integer) time secret was updated

scope String

(String) name of databricks secret scope. Must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters.

stringValue String

(String) super secret sensitive value.

Import

The resource secret can be imported using the scopeName|||secretKey combination. This may change in future versions.

 $ pulumi import databricks:index/secret:Secret app "scopeName|||secretKey"

Package Details

Repository
databricks pulumi/pulumi-databricks
License
Apache-2.0
Notes

This Pulumi package is based on the databricks Terraform Provider.