dbtcloud.DatabricksCredential

dbt Cloud v0.1.10 published on Thursday, Jul 18, 2024 by Pulumi
    Example Usage

    import * as pulumi from "@pulumi/pulumi";
    import * as dbtcloud from "@pulumi/dbtcloud";
    
    // when using the Databricks adapter
    const myDatabricksCred = new dbtcloud.DatabricksCredential("my_databricks_cred", {
        projectId: dbtProject.id,
        adapterId: myDatabricksConnection.adapterId,
        targetName: "prod",
        token: "abcdefgh",
        schema: "my_schema",
        adapterType: "databricks",
    });
    // when using the Spark adapter
    const mySparkCred = new dbtcloud.DatabricksCredential("my_spark_cred", {
        projectId: dbtProject.id,
        adapterId: myDatabricksConnection.adapterId,
        targetName: "prod",
        token: "abcdefgh",
        schema: "my_schema",
        adapterType: "spark",
    });
    
    import pulumi
    import pulumi_dbtcloud as dbtcloud
    
    # when using the Databricks adapter
    my_databricks_cred = dbtcloud.DatabricksCredential("my_databricks_cred",
        project_id=dbt_project["id"],
        adapter_id=my_databricks_connection["adapterId"],
        target_name="prod",
        token="abcdefgh",
        schema="my_schema",
        adapter_type="databricks")
    # when using the Spark adapter
    my_spark_cred = dbtcloud.DatabricksCredential("my_spark_cred",
        project_id=dbt_project["id"],
        adapter_id=my_databricks_connection["adapterId"],
        target_name="prod",
        token="abcdefgh",
        schema="my_schema",
        adapter_type="spark")
    
    package main
    
    import (
    	"github.com/pulumi/pulumi-dbtcloud/sdk/go/dbtcloud"
    	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
    )
    
    func main() {
    	pulumi.Run(func(ctx *pulumi.Context) error {
    		// when using the Databricks adapter
    		_, err := dbtcloud.NewDatabricksCredential(ctx, "my_databricks_cred", &dbtcloud.DatabricksCredentialArgs{
    			ProjectId:   pulumi.Any(dbtProject.Id),
    			AdapterId:   pulumi.Any(myDatabricksConnection.AdapterId),
    			TargetName:  pulumi.String("prod"),
    			Token:       pulumi.String("abcdefgh"),
    			Schema:      pulumi.String("my_schema"),
    			AdapterType: pulumi.String("databricks"),
    		})
    		if err != nil {
    			return err
    		}
    		// when using the Spark adapter
    		_, err = dbtcloud.NewDatabricksCredential(ctx, "my_spark_cred", &dbtcloud.DatabricksCredentialArgs{
    			ProjectId:   pulumi.Any(dbtProject.Id),
    			AdapterId:   pulumi.Any(myDatabricksConnection.AdapterId),
    			TargetName:  pulumi.String("prod"),
    			Token:       pulumi.String("abcdefgh"),
    			Schema:      pulumi.String("my_schema"),
    			AdapterType: pulumi.String("spark"),
    		})
    		if err != nil {
    			return err
    		}
    		return nil
    	})
    }
    
    using System.Collections.Generic;
    using System.Linq;
    using Pulumi;
    using DbtCloud = Pulumi.DbtCloud;
    
    return await Deployment.RunAsync(() => 
    {
        // when using the Databricks adapter
        var myDatabricksCred = new DbtCloud.DatabricksCredential("my_databricks_cred", new()
        {
            ProjectId = dbtProject.Id,
            AdapterId = myDatabricksConnection.AdapterId,
            TargetName = "prod",
            Token = "abcdefgh",
            Schema = "my_schema",
            AdapterType = "databricks",
        });
    
        // when using the Spark adapter
        var mySparkCred = new DbtCloud.DatabricksCredential("my_spark_cred", new()
        {
            ProjectId = dbtProject.Id,
            AdapterId = myDatabricksConnection.AdapterId,
            TargetName = "prod",
            Token = "abcdefgh",
            Schema = "my_schema",
            AdapterType = "spark",
        });
    
    });
    
    package generated_program;
    
    import com.pulumi.Context;
    import com.pulumi.Pulumi;
    import com.pulumi.core.Output;
    import com.pulumi.dbtcloud.DatabricksCredential;
    import com.pulumi.dbtcloud.DatabricksCredentialArgs;
    import java.util.List;
    import java.util.ArrayList;
    import java.util.Map;
    import java.io.File;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    
    public class App {
        public static void main(String[] args) {
            Pulumi.run(App::stack);
        }
    
        public static void stack(Context ctx) {
            // when using the Databricks adapter
            var myDatabricksCred = new DatabricksCredential("myDatabricksCred", DatabricksCredentialArgs.builder()
                .projectId(dbtProject.id())
                .adapterId(myDatabricksConnection.adapterId())
                .targetName("prod")
                .token("abcdefgh")
                .schema("my_schema")
                .adapterType("databricks")
                .build());
    
            // when using the Spark adapter
            var mySparkCred = new DatabricksCredential("mySparkCred", DatabricksCredentialArgs.builder()
                .projectId(dbtProject.id())
                .adapterId(myDatabricksConnection.adapterId())
                .targetName("prod")
                .token("abcdefgh")
                .schema("my_schema")
                .adapterType("spark")
                .build());
    
        }
    }
    
    resources:
      # when using the Databricks adapter
      myDatabricksCred:
        type: dbtcloud:DatabricksCredential
        name: my_databricks_cred
        properties:
          projectId: ${dbtProject.id}
          adapterId: ${myDatabricksConnection.adapterId}
          targetName: prod
          token: abcdefgh
          schema: my_schema
          adapterType: databricks
      # when using the Spark adapter
      mySparkCred:
        type: dbtcloud:DatabricksCredential
        name: my_spark_cred
        properties:
          projectId: ${dbtProject.id}
          adapterId: ${myDatabricksConnection.adapterId}
          targetName: prod
          token: abcdefgh
          schema: my_schema
          adapterType: spark
    

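    Hardcoding the token, as in the examples above, is shown only for brevity. In practice the Databricks token would typically be read from Pulumi configuration as a secret so it is encrypted in state. A minimal TypeScript sketch, assuming a config key named databricksToken and reusing the project and connection references from the examples above:

    import * as pulumi from "@pulumi/pulumi";
    import * as dbtcloud from "@pulumi/dbtcloud";

    // Read the Databricks token from Pulumi config as a secret
    // (set with: pulumi config set --secret databricksToken <value>).
    const config = new pulumi.Config();
    const databricksToken = config.requireSecret("databricksToken");

    // dbtProject and myDatabricksConnection refer to resources defined
    // elsewhere in the program, as in the examples above.
    const mySecretDatabricksCred = new dbtcloud.DatabricksCredential("my_secret_databricks_cred", {
        projectId: dbtProject.id,
        adapterId: myDatabricksConnection.adapterId,
        targetName: "prod",
        token: databricksToken,
        schema: "my_schema",
        adapterType: "databricks",
    });
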
    Create DatabricksCredential Resource

    Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.

    Constructor syntax

    new DatabricksCredential(name: string, args: DatabricksCredentialArgs, opts?: CustomResourceOptions);
    @overload
    def DatabricksCredential(resource_name: str,
                             args: DatabricksCredentialArgs,
                             opts: Optional[ResourceOptions] = None)
    
    @overload
    def DatabricksCredential(resource_name: str,
                             opts: Optional[ResourceOptions] = None,
                             adapter_id: Optional[int] = None,
                             adapter_type: Optional[str] = None,
                             project_id: Optional[int] = None,
                             schema: Optional[str] = None,
                             token: Optional[str] = None,
                             catalog: Optional[str] = None,
                             target_name: Optional[str] = None)
    func NewDatabricksCredential(ctx *Context, name string, args DatabricksCredentialArgs, opts ...ResourceOption) (*DatabricksCredential, error)
    public DatabricksCredential(string name, DatabricksCredentialArgs args, CustomResourceOptions? opts = null)
    public DatabricksCredential(String name, DatabricksCredentialArgs args)
    public DatabricksCredential(String name, DatabricksCredentialArgs args, CustomResourceOptions options)
    
    type: dbtcloud:DatabricksCredential
    properties: # The arguments to resource properties.
    options: # Bag of options to control resource's behavior.
    
    

    Parameters

    name string
    The unique name of the resource.
    args DatabricksCredentialArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    resource_name str
    The unique name of the resource.
    args DatabricksCredentialArgs
    The arguments to resource properties.
    opts ResourceOptions
    Bag of options to control resource's behavior.
    ctx Context
    Context object for the current deployment.
    name string
    The unique name of the resource.
    args DatabricksCredentialArgs
    The arguments to resource properties.
    opts ResourceOption
    Bag of options to control resource's behavior.
    name string
    The unique name of the resource.
    args DatabricksCredentialArgs
    The arguments to resource properties.
    opts CustomResourceOptions
    Bag of options to control resource's behavior.
    name String
    The unique name of the resource.
    args DatabricksCredentialArgs
    The arguments to resource properties.
    options CustomResourceOptions
    Bag of options to control resource's behavior.

    Constructor example

    The following reference example uses placeholder values for all input properties.

    var databricksCredentialResource = new DbtCloud.DatabricksCredential("databricksCredentialResource", new()
    {
        AdapterId = 0,
        AdapterType = "string",
        ProjectId = 0,
        Schema = "string",
        Token = "string",
        Catalog = "string",
        TargetName = "string",
    });
    
    example, err := dbtcloud.NewDatabricksCredential(ctx, "databricksCredentialResource", &dbtcloud.DatabricksCredentialArgs{
    	AdapterId:   pulumi.Int(0),
    	AdapterType: pulumi.String("string"),
    	ProjectId:   pulumi.Int(0),
    	Schema:      pulumi.String("string"),
    	Token:       pulumi.String("string"),
    	Catalog:     pulumi.String("string"),
    	TargetName:  pulumi.String("string"),
    })
    
    var databricksCredentialResource = new DatabricksCredential("databricksCredentialResource", DatabricksCredentialArgs.builder()
        .adapterId(0)
        .adapterType("string")
        .projectId(0)
        .schema("string")
        .token("string")
        .catalog("string")
        .targetName("string")
        .build());
    
    databricks_credential_resource = dbtcloud.DatabricksCredential("databricksCredentialResource",
        adapter_id=0,
        adapter_type="string",
        project_id=0,
        schema="string",
        token="string",
        catalog="string",
        target_name="string")
    
    const databricksCredentialResource = new dbtcloud.DatabricksCredential("databricksCredentialResource", {
        adapterId: 0,
        adapterType: "string",
        projectId: 0,
        schema: "string",
        token: "string",
        catalog: "string",
        targetName: "string",
    });
    
    type: dbtcloud:DatabricksCredential
    properties:
        adapterId: 0
        adapterType: string
        catalog: string
        projectId: 0
        schema: string
        targetName: string
        token: string
    

    DatabricksCredential Resource Properties

    To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

    Inputs

    The DatabricksCredential resource accepts the following input properties; a short example showing the optional catalog input follows the list:

    AdapterId int
    Databricks adapter ID for the credential
    AdapterType string
    The type of the adapter (databricks or spark)
    ProjectId int
    Project ID to create the Databricks credential in
    Schema string
    The schema where to create models
    Token string
    Token for Databricks user
    Catalog string
    The catalog where to create models (only for the databricks adapter)
    TargetName string
    Target name
    AdapterId int
    Databricks adapter ID for the credential
    AdapterType string
    The type of the adapter (databricks or spark)
    ProjectId int
    Project ID to create the Databricks credential in
    Schema string
    The schema where to create models
    Token string
    Token for Databricks user
    Catalog string
    The catalog where to create models (only for the databricks adapter)
    TargetName string
    Target name
    adapterId Integer
    Databricks adapter ID for the credential
    adapterType String
    The type of the adapter (databricks or spark)
    projectId Integer
    Project ID to create the Databricks credential in
    schema String
    The schema where to create models
    token String
    Token for Databricks user
    catalog String
    The catalog where to create models (only for the databricks adapter)
    targetName String
    Target name
    adapterId number
    Databricks adapter ID for the credential
    adapterType string
    The type of the adapter (databricks or spark)
    projectId number
    Project ID to create the Databricks credential in
    schema string
    The schema where to create models
    token string
    Token for Databricks user
    catalog string
    The catalog where to create models (only for the databricks adapter)
    targetName string
    Target name
    adapter_id int
    Databricks adapter ID for the credential
    adapter_type str
    The type of the adapter (databricks or spark)
    project_id int
    Project ID to create the Databricks credential in
    schema str
    The schema where to create models
    token str
    Token for Databricks user
    catalog str
    The catalog where to create models (only for the databricks adapter)
    target_name str
    Target name
    adapterId Number
    Databricks adapter ID for the credential
    adapterType String
    The type of the adapter (databricks or spark)
    projectId Number
    Project ID to create the Databricks credential in
    schema String
    The schema where to create models
    token String
    Token for Databricks user
    catalog String
    The catalog where to create models (only for the databricks adapter)
    targetName String
    Target name
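
    As noted above, the optional catalog input is only used with the databricks adapter and selects the catalog that models are created in. A minimal TypeScript sketch, reusing the hypothetical project and connection references from Example Usage and an assumed catalog named analytics:

    const myCatalogCred = new dbtcloud.DatabricksCredential("my_catalog_cred", {
        projectId: dbtProject.id,
        adapterId: myDatabricksConnection.adapterId,
        targetName: "prod",
        token: "abcdefgh",
        schema: "my_schema",
        catalog: "analytics", // only used by the databricks adapter
        adapterType: "databricks",
    });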

    Outputs

    All input properties are implicitly available as output properties. Additionally, the DatabricksCredential resource produces the following output properties:

    CredentialId int
    The system Databricks credential ID
    Id string
    The provider-assigned unique ID for this managed resource.
    CredentialId int
    The system Databricks credential ID
    Id string
    The provider-assigned unique ID for this managed resource.
    credentialId Integer
    The system Databricks credential ID
    id String
    The provider-assigned unique ID for this managed resource.
    credentialId number
    The system Databricks credential ID
    id string
    The provider-assigned unique ID for this managed resource.
    credential_id int
    The system Databricks credential ID
    id str
    The provider-assigned unique ID for this managed resource.
    credentialId Number
    The system Databricks credential ID
    id String
    The provider-assigned unique ID for this managed resource.

    Look up Existing DatabricksCredential Resource

    Get an existing DatabricksCredential resource’s state with the given name, ID, and optional extra properties used to qualify the lookup. A short example follows the state arguments below.

    public static get(name: string, id: Input<ID>, state?: DatabricksCredentialState, opts?: CustomResourceOptions): DatabricksCredential
    @staticmethod
    def get(resource_name: str,
            id: str,
            opts: Optional[ResourceOptions] = None,
            adapter_id: Optional[int] = None,
            adapter_type: Optional[str] = None,
            catalog: Optional[str] = None,
            credential_id: Optional[int] = None,
            project_id: Optional[int] = None,
            schema: Optional[str] = None,
            target_name: Optional[str] = None,
            token: Optional[str] = None) -> DatabricksCredential
    func GetDatabricksCredential(ctx *Context, name string, id IDInput, state *DatabricksCredentialState, opts ...ResourceOption) (*DatabricksCredential, error)
    public static DatabricksCredential Get(string name, Input<string> id, DatabricksCredentialState? state, CustomResourceOptions? opts = null)
    public static DatabricksCredential get(String name, Output<String> id, DatabricksCredentialState state, CustomResourceOptions options)
    Resource lookup is not supported in YAML
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    resource_name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    name
    The unique name of the resulting resource.
    id
    The unique provider ID of the resource to lookup.
    state
    Any extra arguments used during the lookup.
    opts
    A bag of options that control this resource's behavior.
    The following state arguments are supported:
    AdapterId int
    Databricks adapter ID for the credential
    AdapterType string
    The type of the adapter (databricks or spark)
    Catalog string
    The catalog where to create models (only for the databricks adapter)
    CredentialId int
    The system Databricks credential ID
    ProjectId int
    Project ID to create the Databricks credential in
    Schema string
    The schema where to create models
    TargetName string
    Target name
    Token string
    Token for Databricks user
    AdapterId int
    Databricks adapter ID for the credential
    AdapterType string
    The type of the adapter (databricks or spark)
    Catalog string
    The catalog where to create models (only for the databricks adapter)
    CredentialId int
    The system Databricks credential ID
    ProjectId int
    Project ID to create the Databricks credential in
    Schema string
    The schema where to create models
    TargetName string
    Target name
    Token string
    Token for Databricks user
    adapterId Integer
    Databricks adapter ID for the credential
    adapterType String
    The type of the adapter (databricks or spark)
    catalog String
    The catalog where to create models (only for the databricks adapter)
    credentialId Integer
    The system Databricks credential ID
    projectId Integer
    Project ID to create the Databricks credential in
    schema String
    The schema where to create models
    targetName String
    Target name
    token String
    Token for Databricks user
    adapterId number
    Databricks adapter ID for the credential
    adapterType string
    The type of the adapter (databricks or spark)
    catalog string
    The catalog where to create models (only for the databricks adapter)
    credentialId number
    The system Databricks credential ID
    projectId number
    Project ID to create the Databricks credential in
    schema string
    The schema where to create models
    targetName string
    Target name
    token string
    Token for Databricks user
    adapter_id int
    Databricks adapter ID for the credential
    adapter_type str
    The type of the adapter (databricks or spark)
    catalog str
    The catalog where to create models (only for the databricks adapter)
    credential_id int
    The system Databricks credential ID
    project_id int
    Project ID to create the Databricks credential in
    schema str
    The schema where to create models
    target_name str
    Target name
    token str
    Token for Databricks user
    adapterId Number
    Databricks adapter ID for the credential
    adapterType String
    The type of the adapter (databricks or spark)
    catalog String
    The catalog where to create models (only for the databricks adapter)
    credentialId Number
    The system Databricks credential ID
    projectId Number
    Project ID to create the Databricks credential in
    schema String
    The schema where to create models
    targetName String
    Target name
    token String
    Token for Databricks user
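
    A minimal TypeScript sketch of such a lookup, assuming an existing credential whose provider ID follows the "project_id:credential_id" format described under Import below:

    import * as dbtcloud from "@pulumi/dbtcloud";

    // Adopt an existing credential into the program without recreating it.
    // "12345:6789" is a placeholder project_id:credential_id pair.
    const existingCred = dbtcloud.DatabricksCredential.get(
        "existing_databricks_cred",
        "12345:6789",
    );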

    Import

    Using import blocks (requires Terraform >= 1.5):

    import {
      to = dbtcloud_databricks_credential.my_databricks_credential
      id = "project_id:credential_id"
    }

    import {
      to = dbtcloud_databricks_credential.my_databricks_credential
      id = "12345:6789"
    }

    Using the older import command:

    $ pulumi import dbtcloud:index/databricksCredential:DatabricksCredential my_databricks_credential "project_id:credential_id"
    
    $ pulumi import dbtcloud:index/databricksCredential:DatabricksCredential my_databricks_credential 12345:6789
    

    To learn more about importing existing cloud resources, see Importing resources.

    Package Details

    Repository
    dbtcloud pulumi/pulumi-dbtcloud
    License
    Apache-2.0
    Notes
    This Pulumi package is based on the dbtcloud Terraform Provider.