databricks.getTables
Related Resources
The following resources are used in the same context:
- databricks.Schema to manage schemas within Unity Catalog.
- databricks.Catalog to manage catalogs within Unity Catalog.
Example Usage
Granting
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Pulumi;
using Databricks = Pulumi.Databricks;
return await Deployment.RunAsync(async() =>
{
var thingsTables = await Databricks.GetTables.InvokeAsync(new()
{
CatalogName = "sandbox",
SchemaName = "things",
});
var thingsGrants = new List<Databricks.Grants>();
foreach (var range in thingsTables.Ids.Select((v, k) => new { Key = k, Value = v }))
{
thingsGrants.Add(new Databricks.Grants($"thingsGrants-{range.Key}", new()
{
Table = range.Value,
GrantDetails = new[]
{
new Databricks.Inputs.GrantsGrantArgs
{
Principal = "sensitive",
Privileges = new[]
{
"SELECT",
"MODIFY",
},
},
},
}));
}
});
package main
import (
	"fmt"

	"github.com/pulumi/pulumi-databricks/sdk/go/databricks"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
thingsTables, err := databricks.GetTables(ctx, &databricks.GetTablesArgs{
CatalogName: "sandbox",
SchemaName: "things",
}, nil)
if err != nil {
return err
}
var thingsGrants []*databricks.Grants
for key0, val0 := range thingsTables.Ids {
__res, err := databricks.NewGrants(ctx, fmt.Sprintf("thingsGrants-%v", key0), &databricks.GrantsArgs{
Table: pulumi.String(val0),
Grants: databricks.GrantsGrantArray{
&databricks.GrantsGrantArgs{
Principal: pulumi.String("sensitive"),
Privileges: pulumi.StringArray{
pulumi.String("SELECT"),
pulumi.String("MODIFY"),
},
},
},
})
if err != nil {
return err
}
thingsGrants = append(thingsGrants, __res)
}
return nil
})
}
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.databricks.DatabricksFunctions;
import com.pulumi.databricks.inputs.GetTablesArgs;
import com.pulumi.databricks.Grants;
import com.pulumi.databricks.GrantsArgs;
import com.pulumi.databricks.inputs.GrantsGrantArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var thingsTables = DatabricksFunctions.getTables(GetTablesArgs.builder()
.catalogName("sandbox")
.schemaName("things")
.build());
final var thingsGrants = thingsTables.applyValue(getTablesResult -> {
final var resources = new ArrayList<Grants>();
for (var i = 0; i < getTablesResult.ids().size(); i++) {
    var resource = new Grants("thingsGrants-" + i, GrantsArgs.builder()
        .table(getTablesResult.ids().get(i))
.grants(GrantsGrantArgs.builder()
.principal("sensitive")
.privileges(
"SELECT",
"MODIFY")
.build())
.build());
resources.add(resource);
}
return resources;
});
}
}
import pulumi
import pulumi_databricks as databricks
things_tables = databricks.get_tables(catalog_name="sandbox",
schema_name="things")
things_grants = []
for key, table_full_name in enumerate(things_tables.ids):
    things_grants.append(databricks.Grants(f"thingsGrants-{key}",
        table=table_full_name,
grants=[databricks.GrantsGrantArgs(
principal="sensitive",
privileges=[
"SELECT",
"MODIFY",
],
)]))
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";
export = async () => {
const thingsTables = await databricks.getTables({
catalogName: "sandbox",
schemaName: "things",
});
const thingsGrants: databricks.Grants[] = [];
for (const range of thingsTables.ids.map((v, k) => ({key: k, value: v}))) {
thingsGrants.push(new databricks.Grants(`thingsGrants-${range.key}`, {
table: range.value,
grants: [{
principal: "sensitive",
privileges: [
"SELECT",
"MODIFY",
],
}],
}));
}
}
resources:
thingsGrants:
type: databricks:Grants
properties:
table: ${range.value}
grants:
- principal: sensitive
privileges:
- SELECT
- MODIFY
options: {}
variables:
thingsTables:
fn::invoke:
Function: databricks:getTables
Arguments:
catalogName: sandbox
schemaName: things
Using getTables
Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.
function getTables(args: GetTablesArgs, opts?: InvokeOptions): Promise<GetTablesResult>
function getTablesOutput(args: GetTablesOutputArgs, opts?: InvokeOptions): Output<GetTablesResult>
def get_tables(catalog_name: Optional[str] = None,
ids: Optional[Sequence[str]] = None,
schema_name: Optional[str] = None,
opts: Optional[InvokeOptions] = None) -> GetTablesResult
def get_tables_output(catalog_name: Optional[pulumi.Input[str]] = None,
ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
schema_name: Optional[pulumi.Input[str]] = None,
opts: Optional[InvokeOptions] = None) -> Output[GetTablesResult]
func GetTables(ctx *Context, args *GetTablesArgs, opts ...InvokeOption) (*GetTablesResult, error)
func GetTablesOutput(ctx *Context, args *GetTablesOutputArgs, opts ...InvokeOption) GetTablesResultOutput
> Note: This function is named GetTables in the Go SDK.
public static class GetTables
{
public static Task<GetTablesResult> InvokeAsync(GetTablesArgs args, InvokeOptions? opts = null)
public static Output<GetTablesResult> Invoke(GetTablesInvokeArgs args, InvokeOptions? opts = null)
}
public static CompletableFuture<GetTablesResult> getTables(GetTablesArgs args, InvokeOptions options)
// Output-based functions aren't available in Java yet
fn::invoke:
function: databricks:index/getTables:getTables
arguments:
# arguments dictionary
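For comparison, here is a minimal TypeScript sketch of both invocation forms, reusing the sandbox catalog and things schema from the example above. The direct form resolves to a plain result inside async code; the output form accepts Input-wrapped arguments and returns an Output-wrapped result that composes with other resources' outputs.
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

export = async () => {
    // Direct form: plain arguments, Promise-wrapped result.
    const direct = await databricks.getTables({
        catalogName: "sandbox",
        schemaName: "things",
    });

    // Output form: Input-wrapped arguments, Output-wrapped result; no await needed.
    const viaOutput = databricks.getTablesOutput({
        catalogName: "sandbox",
        schemaName: "things",
    });

    // Output-wrapped fields are consumed with apply.
    const tableCount = viaOutput.ids.apply(ids => ids.length);

    return {
        directCount: direct.ids.length,
        tableCount: tableCount,
    };
};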
The following arguments are supported:
- CatalogName (string): Name of databricks_catalog.
- SchemaName (string): Name of databricks_schema.
- Ids (List<string>): Set of databricks.Table full names in the form catalog.schema.table.
Argument names follow each SDK's casing conventions, for example catalog_name in Python and catalogName in TypeScript and Java.
getTables Result
The following output properties are available:
- CatalogName (string)
- Id (string): The provider-assigned unique ID for this managed resource.
- Ids (List<string>): Set of databricks.Table full names in the form catalog.schema.table.
- SchemaName (string)
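Each entry in Ids is a three-level name in the form catalog.schema.table. Where the individual parts are needed, a hypothetical helper like the TypeScript sketch below (an illustration, not part of the SDK) can split a returned name:
// Hypothetical helper for illustration only: splits one entry from ids into its parts.
function splitTableName(fullName: string): { catalog: string; schema: string; table: string } {
    const [catalog, schema, table] = fullName.split(".");
    return { catalog, schema, table };
}

// Example: splitTableName("sandbox.things.cities")
//   -> { catalog: "sandbox", schema: "things", table: "cities" }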
Package Details
- Repository: databricks pulumi/pulumi-databricks
- License: Apache-2.0
- Notes: This Pulumi package is based on the databricks Terraform Provider.