databricks logo
Databricks v1.14.0, May 23 23

databricks.Library

Explore with Pulumi AI

Installs a library on databricks_cluster. Each different type of library has a slightly different syntax. It’s possible to set only one type of library within one resource. Otherwise, the plan will fail with an error.

Note databricks.Library resource would always start the associated cluster if it’s not running, so make sure to have auto-termination configured. It’s not possible to atomically change the version of the same library without cluster restart. Libraries are fully removed from the cluster only after restart.

Installing library on all clusters

You can install libraries on all clusters with the help of the databricks.getClusters data source:

import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

export = async () => {
    const all = await databricks.getClusters({});
    const cli: databricks.Library[] = [];
    // all.ids is a list of cluster IDs; pair each ID (value) with its index (key).
    for (const range of all.ids.map((v, k) => ({key: k, value: v}))) {
        cli.push(new databricks.Library(`cli-${range.key}`, {
            // Use the cluster ID itself (value), not the list index (key).
            clusterId: range.value,
            pypi: {
                "package": "databricks-cli",
            },
        }));
    }
};
import pulumi
import pulumi_databricks as databricks

all = databricks.get_clusters()
cli = []
# all.ids is a list of cluster IDs; pair each ID (value) with its index (key).
# "item" avoids shadowing the builtin `range`.
for item in [{"key": k, "value": v} for k, v in enumerate(all.ids)]:
    cli.append(databricks.Library(f"cli-{item['key']}",
        # Use the cluster ID itself (value), not the list index (key).
        cluster_id=item["value"],
        pypi=databricks.LibraryPypiArgs(
            package="databricks-cli",
        )))
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Pulumi;
using Databricks = Pulumi.Databricks;

return await Deployment.RunAsync(async() => 
{
    var all = await Databricks.GetClusters.InvokeAsync();

    var cli = new List<Databricks.Library>();
    // all.Ids is a list of cluster IDs; pair each ID (Value) with its index (Key).
    // The generated code left the foreach collection empty, which does not compile.
    foreach (var range in all.Ids.Select((v, k) => new { Key = k, Value = v }))
    {
        cli.Add(new Databricks.Library($"cli-{range.Key}", new()
        {
            // Use the cluster ID itself (Value), not the list index (Key).
            ClusterId = range.Value,
            Pypi = new Databricks.Inputs.LibraryPypiArgs
            {
                Package = "databricks-cli",
            },
        }));
    }
});
package main

import (
	"fmt"

	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		all, err := databricks.GetClusters(ctx, nil, nil)
		if err != nil {
			return err
		}
		var cli []*databricks.Library
		// all.Ids is a list of cluster IDs; iterate with both index and ID.
		for key0, val0 := range all.Ids {
			// Name each resource by its index, but attach the library to the
			// cluster ID itself — the ID is a string, not a number.
			res, err := databricks.NewLibrary(ctx, fmt.Sprintf("cli-%v", key0), &databricks.LibraryArgs{
				ClusterId: pulumi.String(val0),
				Pypi: &databricks.LibraryPypiArgs{
					Package: pulumi.String("databricks-cli"),
				},
			})
			if err != nil {
				return err
			}
			cli = append(cli, res)
		}
		return nil
	})
}
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.DatabricksFunctions;
import com.pulumi.databricks.inputs.GetClustersArgs;
import com.pulumi.databricks.Library;
import com.pulumi.databricks.LibraryArgs;
import com.pulumi.databricks.inputs.LibraryPypiArgs;
import com.pulumi.codegen.internal.KeyedValue;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        final var all = DatabricksFunctions.getClusters();

        final var cli = all.applyValue(getClustersResult -> {
            final var resources = new ArrayList<Library>();
            for (var range : KeyedValue.of(getClustersResult.ids()) {
                var resource = new Library("cli-" + range.key(), LibraryArgs.builder()                
                    .clusterId(range.key())
                    .pypi(LibraryPypiArgs.builder()
                        .package_("databricks-cli")
                        .build())
                    .build());

                resources.add(resource);
            }

            return resources;
        });

    }
}
# NOTE(review): this generated YAML references ${range.key}, but the per-cluster
# loop shown in the other languages cannot be expressed in Pulumi YAML — replace
# ${range.key} with a concrete cluster ID, or use a general-purpose language.
resources:
  cli:
    type: databricks:Library
    properties:
      clusterId: ${range.key}
      pypi:
        package: databricks-cli
    options: {}
variables:
  all:
    fn::invoke:
      Function: databricks:getClusters
      Arguments: {}

Java/Scala JAR

import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

const appDbfsFile = new databricks.DbfsFile("appDbfsFile", {
    source: `${path.module}/app-0.0.1.jar`,
    path: "/FileStore/app-0.0.1.jar",
});
const appLibrary = new databricks.Library("appLibrary", {
    clusterId: databricks_cluster["this"].id,
    jar: appDbfsFile.dbfsPath,
});
import pulumi
import pulumi_databricks as databricks

app_dbfs_file = databricks.DbfsFile("appDbfsFile",
    source=f"{path['module']}/app-0.0.1.jar",
    path="/FileStore/app-0.0.1.jar")
app_library = databricks.Library("appLibrary",
    cluster_id=databricks_cluster["this"]["id"],
    jar=app_dbfs_file.dbfs_path)
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;

return await Deployment.RunAsync(() => 
{
    var appDbfsFile = new Databricks.DbfsFile("appDbfsFile", new()
    {
        Source = $"{path.Module}/app-0.0.1.jar",
        Path = "/FileStore/app-0.0.1.jar",
    });

    var appLibrary = new Databricks.Library("appLibrary", new()
    {
        ClusterId = databricks_cluster.This.Id,
        Jar = appDbfsFile.DbfsPath,
    });

});
package main

import (
	"fmt"

	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		appDbfsFile, err := databricks.NewDbfsFile(ctx, "appDbfsFile", &databricks.DbfsFileArgs{
			Source: pulumi.String(fmt.Sprintf("%v/app-0.0.1.jar", path.Module)),
			Path:   pulumi.String("/FileStore/app-0.0.1.jar"),
		})
		if err != nil {
			return err
		}
		_, err = databricks.NewLibrary(ctx, "appLibrary", &databricks.LibraryArgs{
			ClusterId: pulumi.Any(databricks_cluster.This.Id),
			Jar:       appDbfsFile.DbfsPath,
		})
		if err != nil {
			return err
		}
		return nil
	})
}
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.DbfsFile;
import com.pulumi.databricks.DbfsFileArgs;
import com.pulumi.databricks.Library;
import com.pulumi.databricks.LibraryArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var appDbfsFile = new DbfsFile("appDbfsFile", DbfsFileArgs.builder()        
            .source(String.format("%s/app-0.0.1.jar", path.module()))
            .path("/FileStore/app-0.0.1.jar")
            .build());

        var appLibrary = new Library("appLibrary", LibraryArgs.builder()        
            .clusterId(databricks_cluster.this().id())
            .jar(appDbfsFile.dbfsPath())
            .build());

    }
}
resources:
  appDbfsFile:
    type: databricks:DbfsFile
    properties:
      source: ${path.module}/app-0.0.1.jar
      path: /FileStore/app-0.0.1.jar
  appLibrary:
    type: databricks:Library
    properties:
      clusterId: ${databricks_cluster.this.id}
      jar: ${appDbfsFile.dbfsPath}

Java/Scala Maven

Installing artifacts from a Maven repository. You can also optionally specify a repo parameter for a custom Maven-style repository, which should be accessible without any authentication. Maven libraries are resolved in the Databricks Control Plane, so the repo should be accessible from it. It can even be a properly configured Maven S3 wagon, AWS CodeArtifact or Azure Artifacts.

import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

const deequ = new databricks.Library("deequ", {
    clusterId: databricks_cluster["this"].id,
    maven: {
        coordinates: "com.amazon.deequ:deequ:1.0.4",
        exclusions: ["org.apache.avro:avro"],
    },
});
import pulumi
import pulumi_databricks as databricks

deequ = databricks.Library("deequ",
    cluster_id=databricks_cluster["this"]["id"],
    maven=databricks.LibraryMavenArgs(
        coordinates="com.amazon.deequ:deequ:1.0.4",
        exclusions=["org.apache.avro:avro"],
    ))
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;

return await Deployment.RunAsync(() => 
{
    var deequ = new Databricks.Library("deequ", new()
    {
        ClusterId = databricks_cluster.This.Id,
        Maven = new Databricks.Inputs.LibraryMavenArgs
        {
            Coordinates = "com.amazon.deequ:deequ:1.0.4",
            Exclusions = new[]
            {
                "org.apache.avro:avro",
            },
        },
    });

});
package main

import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := databricks.NewLibrary(ctx, "deequ", &databricks.LibraryArgs{
			ClusterId: pulumi.Any(databricks_cluster.This.Id),
			Maven: &databricks.LibraryMavenArgs{
				Coordinates: pulumi.String("com.amazon.deequ:deequ:1.0.4"),
				Exclusions: pulumi.StringArray{
					pulumi.String("org.apache.avro:avro"),
				},
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.Library;
import com.pulumi.databricks.LibraryArgs;
import com.pulumi.databricks.inputs.LibraryMavenArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var deequ = new Library("deequ", LibraryArgs.builder()        
            .clusterId(databricks_cluster.this().id())
            .maven(LibraryMavenArgs.builder()
                .coordinates("com.amazon.deequ:deequ:1.0.4")
                .exclusions("org.apache.avro:avro")
                .build())
            .build());

    }
}
resources:
  deequ:
    type: databricks:Library
    properties:
      clusterId: ${databricks_cluster.this.id}
      maven:
        coordinates: com.amazon.deequ:deequ:1.0.4
        exclusions:
          - org.apache.avro:avro

Python Wheel

import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

const appDbfsFile = new databricks.DbfsFile("appDbfsFile", {
    source: `${path.module}/baz.whl`,
    path: "/FileStore/baz.whl",
});
const appLibrary = new databricks.Library("appLibrary", {
    clusterId: databricks_cluster["this"].id,
    whl: appDbfsFile.dbfsPath,
});
import pulumi
import pulumi_databricks as databricks

app_dbfs_file = databricks.DbfsFile("appDbfsFile",
    source=f"{path['module']}/baz.whl",
    path="/FileStore/baz.whl")
app_library = databricks.Library("appLibrary",
    cluster_id=databricks_cluster["this"]["id"],
    whl=app_dbfs_file.dbfs_path)
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;

return await Deployment.RunAsync(() => 
{
    var appDbfsFile = new Databricks.DbfsFile("appDbfsFile", new()
    {
        Source = $"{path.Module}/baz.whl",
        Path = "/FileStore/baz.whl",
    });

    var appLibrary = new Databricks.Library("appLibrary", new()
    {
        ClusterId = databricks_cluster.This.Id,
        Whl = appDbfsFile.DbfsPath,
    });

});
package main

import (
	"fmt"

	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		appDbfsFile, err := databricks.NewDbfsFile(ctx, "appDbfsFile", &databricks.DbfsFileArgs{
			Source: pulumi.String(fmt.Sprintf("%v/baz.whl", path.Module)),
			Path:   pulumi.String("/FileStore/baz.whl"),
		})
		if err != nil {
			return err
		}
		_, err = databricks.NewLibrary(ctx, "appLibrary", &databricks.LibraryArgs{
			ClusterId: pulumi.Any(databricks_cluster.This.Id),
			Whl:       appDbfsFile.DbfsPath,
		})
		if err != nil {
			return err
		}
		return nil
	})
}
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.DbfsFile;
import com.pulumi.databricks.DbfsFileArgs;
import com.pulumi.databricks.Library;
import com.pulumi.databricks.LibraryArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var appDbfsFile = new DbfsFile("appDbfsFile", DbfsFileArgs.builder()        
            .source(String.format("%s/baz.whl", path.module()))
            .path("/FileStore/baz.whl")
            .build());

        var appLibrary = new Library("appLibrary", LibraryArgs.builder()        
            .clusterId(databricks_cluster.this().id())
            .whl(appDbfsFile.dbfsPath())
            .build());

    }
}
resources:
  appDbfsFile:
    type: databricks:DbfsFile
    properties:
      source: ${path.module}/baz.whl
      path: /FileStore/baz.whl
  appLibrary:
    type: databricks:Library
    properties:
      clusterId: ${databricks_cluster.this.id}
      whl: ${appDbfsFile.dbfsPath}

Python PyPI

Installing Python PyPI artifacts. You can optionally also specify the repo parameter for a custom PyPI mirror, which should be accessible without any authentication for the network that cluster runs in.

Note repo host should be accessible from the Internet by Databricks control plane. If connectivity to custom PyPI repositories is required, please modify cluster-node /etc/pip.conf through databricks_global_init_script.

import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

const fbprophet = new databricks.Library("fbprophet", {
    clusterId: databricks_cluster["this"].id,
    pypi: {
        "package": "fbprophet==0.6",
    },
});
import pulumi
import pulumi_databricks as databricks

fbprophet = databricks.Library("fbprophet",
    cluster_id=databricks_cluster["this"]["id"],
    pypi=databricks.LibraryPypiArgs(
        package="fbprophet==0.6",
    ))
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;

return await Deployment.RunAsync(() => 
{
    var fbprophet = new Databricks.Library("fbprophet", new()
    {
        ClusterId = databricks_cluster.This.Id,
        Pypi = new Databricks.Inputs.LibraryPypiArgs
        {
            Package = "fbprophet==0.6",
        },
    });

});
package main

import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := databricks.NewLibrary(ctx, "fbprophet", &databricks.LibraryArgs{
			ClusterId: pulumi.Any(databricks_cluster.This.Id),
			Pypi: &databricks.LibraryPypiArgs{
				Package: pulumi.String("fbprophet==0.6"),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.Library;
import com.pulumi.databricks.LibraryArgs;
import com.pulumi.databricks.inputs.LibraryPypiArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var fbprophet = new Library("fbprophet", LibraryArgs.builder()        
            .clusterId(databricks_cluster.this().id())
            .pypi(LibraryPypiArgs.builder()
                .package_("fbprophet==0.6")
                .build())
            .build());

    }
}
resources:
  fbprophet:
    type: databricks:Library
    properties:
      clusterId: ${databricks_cluster.this.id}
      pypi:
        package: fbprophet==0.6

Python EGG

import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

const appDbfsFile = new databricks.DbfsFile("appDbfsFile", {
    source: `${path.module}/foo.egg`,
    path: "/FileStore/foo.egg",
});
const appLibrary = new databricks.Library("appLibrary", {
    clusterId: databricks_cluster["this"].id,
    egg: appDbfsFile.dbfsPath,
});
import pulumi
import pulumi_databricks as databricks

app_dbfs_file = databricks.DbfsFile("appDbfsFile",
    source=f"{path['module']}/foo.egg",
    path="/FileStore/foo.egg")
app_library = databricks.Library("appLibrary",
    cluster_id=databricks_cluster["this"]["id"],
    egg=app_dbfs_file.dbfs_path)
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;

return await Deployment.RunAsync(() => 
{
    var appDbfsFile = new Databricks.DbfsFile("appDbfsFile", new()
    {
        Source = $"{path.Module}/foo.egg",
        Path = "/FileStore/foo.egg",
    });

    var appLibrary = new Databricks.Library("appLibrary", new()
    {
        ClusterId = databricks_cluster.This.Id,
        Egg = appDbfsFile.DbfsPath,
    });

});
package main

import (
	"fmt"

	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		appDbfsFile, err := databricks.NewDbfsFile(ctx, "appDbfsFile", &databricks.DbfsFileArgs{
			Source: pulumi.String(fmt.Sprintf("%v/foo.egg", path.Module)),
			Path:   pulumi.String("/FileStore/foo.egg"),
		})
		if err != nil {
			return err
		}
		_, err = databricks.NewLibrary(ctx, "appLibrary", &databricks.LibraryArgs{
			ClusterId: pulumi.Any(databricks_cluster.This.Id),
			Egg:       appDbfsFile.DbfsPath,
		})
		if err != nil {
			return err
		}
		return nil
	})
}
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.DbfsFile;
import com.pulumi.databricks.DbfsFileArgs;
import com.pulumi.databricks.Library;
import com.pulumi.databricks.LibraryArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var appDbfsFile = new DbfsFile("appDbfsFile", DbfsFileArgs.builder()        
            .source(String.format("%s/foo.egg", path.module()))
            .path("/FileStore/foo.egg")
            .build());

        var appLibrary = new Library("appLibrary", LibraryArgs.builder()        
            .clusterId(databricks_cluster.this().id())
            .egg(appDbfsFile.dbfsPath())
            .build());

    }
}
resources:
  appDbfsFile:
    type: databricks:DbfsFile
    properties:
      source: ${path.module}/foo.egg
      path: /FileStore/foo.egg
  appLibrary:
    type: databricks:Library
    properties:
      clusterId: ${databricks_cluster.this.id}
      egg: ${appDbfsFile.dbfsPath}

R CRAN

Installing artifacts from CRAN. You can also optionally specify a repo parameter for a custom CRAN mirror.

import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

const rkeops = new databricks.Library("rkeops", {
    clusterId: databricks_cluster["this"].id,
    cran: {
        "package": "rkeops",
    },
});
import pulumi
import pulumi_databricks as databricks

rkeops = databricks.Library("rkeops",
    cluster_id=databricks_cluster["this"]["id"],
    cran=databricks.LibraryCranArgs(
        package="rkeops",
    ))
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;

return await Deployment.RunAsync(() => 
{
    var rkeops = new Databricks.Library("rkeops", new()
    {
        ClusterId = databricks_cluster.This.Id,
        Cran = new Databricks.Inputs.LibraryCranArgs
        {
            Package = "rkeops",
        },
    });

});
package main

import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := databricks.NewLibrary(ctx, "rkeops", &databricks.LibraryArgs{
			ClusterId: pulumi.Any(databricks_cluster.This.Id),
			Cran: &databricks.LibraryCranArgs{
				Package: pulumi.String("rkeops"),
			},
		})
		if err != nil {
			return err
		}
		return nil
	})
}
package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.Library;
import com.pulumi.databricks.LibraryArgs;
import com.pulumi.databricks.inputs.LibraryCranArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        var rkeops = new Library("rkeops", LibraryArgs.builder()        
            .clusterId(databricks_cluster.this().id())
            .cran(LibraryCranArgs.builder()
                .package_("rkeops")
                .build())
            .build());

    }
}
resources:
  rkeops:
    type: databricks:Library
    properties:
      clusterId: ${databricks_cluster.this.id}
      cran:
        package: rkeops

Other Databricks resources — such as clusters, DBFS files, and global init scripts — are often used in the same context as this resource.

Create Library Resource

new Library(name: string, args: LibraryArgs, opts?: CustomResourceOptions);
@overload
def Library(resource_name: str,
            opts: Optional[ResourceOptions] = None,
            cluster_id: Optional[str] = None,
            cran: Optional[LibraryCranArgs] = None,
            egg: Optional[str] = None,
            jar: Optional[str] = None,
            maven: Optional[LibraryMavenArgs] = None,
            pypi: Optional[LibraryPypiArgs] = None,
            whl: Optional[str] = None)
@overload
def Library(resource_name: str,
            args: LibraryArgs,
            opts: Optional[ResourceOptions] = None)
func NewLibrary(ctx *Context, name string, args LibraryArgs, opts ...ResourceOption) (*Library, error)
public Library(string name, LibraryArgs args, CustomResourceOptions? opts = null)
public Library(String name, LibraryArgs args)
public Library(String name, LibraryArgs args, CustomResourceOptions options)
type: databricks:Library
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

name string
The unique name of the resource.
args LibraryArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name str
The unique name of the resource.
args LibraryArgs
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.
ctx Context
Context object for the current deployment.
name string
The unique name of the resource.
args LibraryArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name string
The unique name of the resource.
args LibraryArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
name String
The unique name of the resource.
args LibraryArgs
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.

Library Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

The Library resource accepts the following input properties:

Outputs

All input properties are implicitly available as output properties. Additionally, the Library resource produces the following output properties:

Id string

The provider-assigned unique ID for this managed resource.

Id string

The provider-assigned unique ID for this managed resource.

id String

The provider-assigned unique ID for this managed resource.

id string

The provider-assigned unique ID for this managed resource.

id str

The provider-assigned unique ID for this managed resource.

id String

The provider-assigned unique ID for this managed resource.

Look up Existing Library Resource

Get an existing Library resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: LibraryState, opts?: CustomResourceOptions): Library
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        cluster_id: Optional[str] = None,
        cran: Optional[LibraryCranArgs] = None,
        egg: Optional[str] = None,
        jar: Optional[str] = None,
        maven: Optional[LibraryMavenArgs] = None,
        pypi: Optional[LibraryPypiArgs] = None,
        whl: Optional[str] = None) -> Library
func GetLibrary(ctx *Context, name string, id IDInput, state *LibraryState, opts ...ResourceOption) (*Library, error)
public static Library Get(string name, Input<string> id, LibraryState? state, CustomResourceOptions? opts = null)
public static Library get(String name, Output<String> id, LibraryState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.

Supporting Types

LibraryCran

Package string
Repo string
Package string
Repo string
package_ String
repo String
package string
repo string
package str
repo str
package String
repo String

LibraryMaven

Coordinates string
Exclusions List<string>
Repo string
Coordinates string
Exclusions []string
Repo string
coordinates String
exclusions List<String>
repo String
coordinates string
exclusions string[]
repo string
coordinates str
exclusions Sequence[str]
repo str
coordinates String
exclusions List<String>
repo String

LibraryPypi

Package string
Repo string
Package string
Repo string
package_ String
repo String
package string
repo string
package str
repo str
package String
repo String

Import

-> Note Importing this resource is not currently supported.

Package Details

Repository
databricks pulumi/pulumi-databricks
License
Apache-2.0
Notes

This Pulumi package is based on the databricks Terraform Provider.