Databricks v1.14.0 (May 23, 2023)

databricks.MlflowExperiment


This resource allows you to manage MLflow experiments in Databricks.

Access Control

  • databricks.Permissions can control which groups or individual users can Read, Edit, or Manage individual experiments, as sketched below.
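A minimal TypeScript sketch of that pattern, assuming the databricks.Permissions resource is attached to the experiment via its experimentId argument; the group names are placeholders for real workspace groups:

import * as databricks from "@pulumi/databricks";

// An experiment whose access we want to manage.
const experiment = new databricks.MlflowExperiment("shared", {
    artifactLocation: "dbfs:/tmp/shared-experiment",
});

// Grant read access to one group and full management to another.
// "data-scientists" and "ml-admins" are placeholder group names.
const experimentPermissions = new databricks.Permissions("shared-experiment-permissions", {
    experimentId: experiment.id,
    accessControls: [
        { groupName: "data-scientists", permissionLevel: "CAN_READ" },
        { groupName: "ml-admins", permissionLevel: "CAN_MANAGE" },
    ],
});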


Example Usage

C#:

using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Databricks = Pulumi.Databricks;

return await Deployment.RunAsync(() => 
{
    var me = Databricks.GetCurrentUser.Invoke();

    var @this = new Databricks.MlflowExperiment("this", new()
    {
        ArtifactLocation = "dbfs:/tmp/my-experiment",
        Description = "My MLflow experiment description",
    });

});

Go:

package main

import (
	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)

func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := databricks.GetCurrentUser(ctx, nil, nil)
		if err != nil {
			return err
		}
		_, err = databricks.NewMlflowExperiment(ctx, "this", &databricks.MlflowExperimentArgs{
			ArtifactLocation: pulumi.String("dbfs:/tmp/my-experiment"),
			Description:      pulumi.String("My MLflow experiment description"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}

Java:

package generated_program;

import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.DatabricksFunctions;
import com.pulumi.databricks.MlflowExperiment;
import com.pulumi.databricks.MlflowExperimentArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;

public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }

    public static void stack(Context ctx) {
        final var me = DatabricksFunctions.getCurrentUser();

        var this_ = new MlflowExperiment("this", MlflowExperimentArgs.builder()        
            .artifactLocation("dbfs:/tmp/my-experiment")
            .description("My MLflow experiment description")
            .build());

    }
}

Python:

import pulumi
import pulumi_databricks as databricks

me = databricks.get_current_user()
this = databricks.MlflowExperiment("this",
    artifact_location="dbfs:/tmp/my-experiment",
    description="My MLflow experiment description")
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

const me = databricks.getCurrentUser({});
const _this = new databricks.MlflowExperiment("this", {
    artifactLocation: "dbfs:/tmp/my-experiment",
    description: "My MLflow experiment description",
});

YAML:

resources:
  this:
    type: databricks:MlflowExperiment
    properties:
      artifactLocation: dbfs:/tmp/my-experiment
      description: My MLflow experiment description
variables:
  me:
    fn::invoke:
      Function: databricks:getCurrentUser
      Arguments: {}

Create MlflowExperiment Resource

new MlflowExperiment(name: string, args?: MlflowExperimentArgs, opts?: CustomResourceOptions);
@overload
def MlflowExperiment(resource_name: str,
                     opts: Optional[ResourceOptions] = None,
                     artifact_location: Optional[str] = None,
                     creation_time: Optional[int] = None,
                     description: Optional[str] = None,
                     experiment_id: Optional[str] = None,
                     last_update_time: Optional[int] = None,
                     lifecycle_stage: Optional[str] = None,
                     name: Optional[str] = None)
@overload
def MlflowExperiment(resource_name: str,
                     args: Optional[MlflowExperimentArgs] = None,
                     opts: Optional[ResourceOptions] = None)
func NewMlflowExperiment(ctx *Context, name string, args *MlflowExperimentArgs, opts ...ResourceOption) (*MlflowExperiment, error)
public MlflowExperiment(string name, MlflowExperimentArgs? args = null, CustomResourceOptions? opts = null)
public MlflowExperiment(String name, MlflowExperimentArgs args)
public MlflowExperiment(String name, MlflowExperimentArgs args, CustomResourceOptions options)
type: databricks:MlflowExperiment
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.

name string
The unique name of the resource.
args MlflowExperimentArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
resource_name str
The unique name of the resource.
args MlflowExperimentArgs
The arguments to resource properties.
opts ResourceOptions
Bag of options to control resource's behavior.
ctx Context
Context object for the current deployment.
name string
The unique name of the resource.
args MlflowExperimentArgs
The arguments to resource properties.
opts ResourceOption
Bag of options to control resource's behavior.
name string
The unique name of the resource.
args MlflowExperimentArgs
The arguments to resource properties.
opts CustomResourceOptions
Bag of options to control resource's behavior.
name String
The unique name of the resource.
args MlflowExperimentArgs
The arguments to resource properties.
options CustomResourceOptions
Bag of options to control resource's behavior.

MlflowExperiment Resource Properties

To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.

Inputs

The MlflowExperiment resource accepts the following input properties:

ArtifactLocation string

Path to dbfs:/ or s3:// artifact location of the MLflow experiment.

CreationTime int
Description string

The description of the MLflow experiment.

ExperimentId string
LastUpdateTime int
LifecycleStage string
Name string

Name of MLflow experiment. It must be an absolute path within the Databricks workspace, e.g. /Users/<some-username>/my-experiment. For more information about changes to experiment naming conventions, see mlflow docs.

ArtifactLocation string

Path to dbfs:/ or s3:// artifact location of the MLflow experiment.

CreationTime int
Description string

The description of the MLflow experiment.

ExperimentId string
LastUpdateTime int
LifecycleStage string
Name string

Name of MLflow experiment. It must be an absolute path within the Databricks workspace, e.g. /Users/<some-username>/my-experiment. For more information about changes to experiment naming conventions, see mlflow docs.

artifactLocation String

Path to dbfs:/ or s3:// artifact location of the MLflow experiment.

creationTime Integer
description String

The description of the MLflow experiment.

experimentId String
lastUpdateTime Integer
lifecycleStage String
name String

Name of MLflow experiment. It must be an absolute path within the Databricks workspace, e.g. /Users/<some-username>/my-experiment. For more information about changes to experiment naming conventions, see mlflow docs.

artifactLocation string

Path to dbfs:/ or s3:// artifact location of the MLflow experiment.

creationTime number
description string

The description of the MLflow experiment.

experimentId string
lastUpdateTime number
lifecycleStage string
name string

Name of MLflow experiment. It must be an absolute path within the Databricks workspace, e.g. /Users/<some-username>/my-experiment. For more information about changes to experiment naming conventions, see mlflow docs.

artifact_location str

Path to dbfs:/ or s3:// artifact location of the MLflow experiment.

creation_time int
description str

The description of the MLflow experiment.

experiment_id str
last_update_time int
lifecycle_stage str
name str

Name of MLflow experiment. It must be an absolute path within the Databricks workspace, e.g. /Users/<some-username>/my-experiment. For more information about changes to experiment naming conventions, see mlflow docs.

artifactLocation String

Path to dbfs:/ or s3:// artifact location of the MLflow experiment.

creationTime Number
description String

The description of the MLflow experiment.

experimentId String
lastUpdateTime Number
lifecycleStage String
name String

Name of MLflow experiment. It must be an absolute path within the Databricks workspace, e.g. /Users/<some-username>/my-experiment. For more information about changes to experiment naming conventions, see mlflow docs.
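Because the name must be an absolute workspace path, a common pattern is to derive it from the current user's home directory. A minimal TypeScript sketch, assuming the getCurrentUser data source's home attribute and an arbitrary "Sample" suffix:

import * as databricks from "@pulumi/databricks";

// Fetch the calling user so the experiment can live under their home folder.
const me = databricks.getCurrentUser({});

const experiment = new databricks.MlflowExperiment("sample", {
    // Absolute workspace path, e.g. /Users/<some-username>/Sample.
    name: me.then(u => `${u.home}/Sample`),
    artifactLocation: "dbfs:/tmp/my-experiment",
});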

Outputs

All input properties are implicitly available as output properties. Additionally, the MlflowExperiment resource produces the following output properties:

Id string

The provider-assigned unique ID for this managed resource.

Id string

The provider-assigned unique ID for this managed resource.

id String

The provider-assigned unique ID for this managed resource.

id string

The provider-assigned unique ID for this managed resource.

id str

The provider-assigned unique ID for this managed resource.

id String

The provider-assigned unique ID for this managed resource.
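For instance, a minimal TypeScript sketch that surfaces these outputs as stack outputs (resource and export names are illustrative):

import * as databricks from "@pulumi/databricks";

const experiment = new databricks.MlflowExperiment("sample", {
    artifactLocation: "dbfs:/tmp/my-experiment",
});

// The provider-assigned ID, plus an input echoed back as an output.
export const experimentId = experiment.id;
export const experimentName = experiment.name;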

Look up Existing MlflowExperiment Resource

Get an existing MlflowExperiment resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.

public static get(name: string, id: Input<ID>, state?: MlflowExperimentState, opts?: CustomResourceOptions): MlflowExperiment
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        artifact_location: Optional[str] = None,
        creation_time: Optional[int] = None,
        description: Optional[str] = None,
        experiment_id: Optional[str] = None,
        last_update_time: Optional[int] = None,
        lifecycle_stage: Optional[str] = None,
        name: Optional[str] = None) -> MlflowExperiment
func GetMlflowExperiment(ctx *Context, name string, id IDInput, state *MlflowExperimentState, opts ...ResourceOption) (*MlflowExperiment, error)
public static MlflowExperiment Get(string name, Input<string> id, MlflowExperimentState? state, CustomResourceOptions? opts = null)
public static MlflowExperiment get(String name, Output<String> id, MlflowExperimentState state, CustomResourceOptions options)
Resource lookup is not supported in YAML
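For example, a minimal TypeScript sketch of the lookup; the ID shown is a placeholder for a real experiment ID:

import * as databricks from "@pulumi/databricks";

// Adopt an existing experiment into the program by its provider ID.
// "1234567890123456" is a placeholder; use the real experiment ID.
const existing = databricks.MlflowExperiment.get("existing-experiment", "1234567890123456");

export const existingName = existing.name;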
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
resource_name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
name
The unique name of the resulting resource.
id
The unique provider ID of the resource to lookup.
state
Any extra arguments used during the lookup.
opts
A bag of options that control this resource's behavior.
The following state arguments are supported:
ArtifactLocation string

Path to dbfs:/ or s3:// artifact location of the MLflow experiment.

CreationTime int
Description string

The description of the MLflow experiment.

ExperimentId string
LastUpdateTime int
LifecycleStage string
Name string

Name of MLflow experiment. It must be an absolute path within the Databricks workspace, e.g. /Users/<some-username>/my-experiment. For more information about changes to experiment naming conventions, see mlflow docs.

ArtifactLocation string

Path to dbfs:/ or s3:// artifact location of the MLflow experiment.

CreationTime int
Description string

The description of the MLflow experiment.

ExperimentId string
LastUpdateTime int
LifecycleStage string
Name string

Name of MLflow experiment. It must be an absolute path within the Databricks workspace, e.g. /Users/<some-username>/my-experiment. For more information about changes to experiment naming conventions, see mlflow docs.

artifactLocation String

Path to dbfs:/ or s3:// artifact location of the MLflow experiment.

creationTime Integer
description String

The description of the MLflow experiment.

experimentId String
lastUpdateTime Integer
lifecycleStage String
name String

Name of MLflow experiment. It must be an absolute path within the Databricks workspace, e.g. /Users/<some-username>/my-experiment. For more information about changes to experiment naming conventions, see mlflow docs.

artifactLocation string

Path to dbfs:/ or s3:// artifact location of the MLflow experiment.

creationTime number
description string

The description of the MLflow experiment.

experimentId string
lastUpdateTime number
lifecycleStage string
name string

Name of MLflow experiment. It must be an absolute path within the Databricks workspace, e.g. /Users/<some-username>/my-experiment. For more information about changes to experiment naming conventions, see mlflow docs.

artifact_location str

Path to dbfs:/ or s3:// artifact location of the MLflow experiment.

creation_time int
description str

The description of the MLflow experiment.

experiment_id str
last_update_time int
lifecycle_stage str
name str

Name of MLflow experiment. It must be an absolute path within the Databricks workspace, e.g. /Users/<some-username>/my-experiment. For more information about changes to experiment naming conventions, see mlflow docs.

artifactLocation String

Path to dbfs:/ or s3:// artifact location of the MLflow experiment.

creationTime Number
description String

The description of the MLflow experiment.

experimentId String
lastUpdateTime Number
lifecycleStage String
name String

Name of MLflow experiment. It must be an absolute path within the Databricks workspace, e.g. /Users/<some-username>/my-experiment. For more information about changes to experiment naming conventions, see mlflow docs.

Import

The experiment resource can be imported using the ID of the experiment:

 $ pulumi import databricks:index/mlflowExperiment:MlflowExperiment this <experiment-id>

Package Details

Repository: databricks (pulumi/pulumi-databricks)
License: Apache-2.0
Notes: This Pulumi package is based on the databricks Terraform Provider.