# Resource: logdna.Archive
Manages LogDNA Archiving configuration for an account.
Example IBM COS Archive
import * as pulumi from "@pulumi/pulumi";
import * as logdna from "@pulumi/logdna";
const config = new logdna.Archive("config", {
ibmConfig: {
apikey: "key",
bucket: "example",
endpoint: "example.com",
resourceinstanceid: "id",
},
integration: "ibm",
});
import pulumi
import pulumi_logdna as logdna
config = logdna.Archive("config",
ibm_config={
"apikey": "key",
"bucket": "example",
"endpoint": "example.com",
"resourceinstanceid": "id",
},
integration="ibm")
package main
import (
"github.com/pulumi/pulumi-terraform-provider/sdks/go/logdna/logdna"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := logdna.NewArchive(ctx, "config", &logdna.ArchiveArgs{
IbmConfig: &logdna.ArchiveIbmConfigArgs{
Apikey: pulumi.String("key"),
Bucket: pulumi.String("example"),
Endpoint: pulumi.String("example.com"),
Resourceinstanceid: pulumi.String("id"),
},
Integration: pulumi.String("ibm"),
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Logdna = Pulumi.Logdna;
return await Deployment.RunAsync(() =>
{
var config = new Logdna.Archive("config", new()
{
IbmConfig = new Logdna.Inputs.ArchiveIbmConfigArgs
{
Apikey = "key",
Bucket = "example",
Endpoint = "example.com",
Resourceinstanceid = "id",
},
Integration = "ibm",
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.logdna.Archive;
import com.pulumi.logdna.ArchiveArgs;
import com.pulumi.logdna.inputs.ArchiveIbmConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var config = new Archive("config", ArchiveArgs.builder()
.ibmConfig(ArchiveIbmConfigArgs.builder()
.apikey("key")
.bucket("example")
.endpoint("example.com")
.resourceinstanceid("id")
.build())
.integration("ibm")
.build());
}
}
resources:
config:
type: logdna:Archive
properties:
ibmConfig:
apikey: key
bucket: example
endpoint: example.com
resourceinstanceid: id
integration: ibm
Example AWS S3 Archive
import * as pulumi from "@pulumi/pulumi";
import * as logdna from "@pulumi/logdna";
const config = new logdna.Archive("config", {
integration: "s3",
s3Config: {
bucket: "example",
},
});
import pulumi
import pulumi_logdna as logdna
config = logdna.Archive("config",
integration="s3",
s3_config={
"bucket": "example",
})
package main
import (
"github.com/pulumi/pulumi-terraform-provider/sdks/go/logdna/logdna"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := logdna.NewArchive(ctx, "config", &logdna.ArchiveArgs{
Integration: pulumi.String("s3"),
S3Config: &logdna.ArchiveS3ConfigArgs{
Bucket: pulumi.String("example"),
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Logdna = Pulumi.Logdna;
return await Deployment.RunAsync(() =>
{
var config = new Logdna.Archive("config", new()
{
Integration = "s3",
S3Config = new Logdna.Inputs.ArchiveS3ConfigArgs
{
Bucket = "example",
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.logdna.Archive;
import com.pulumi.logdna.ArchiveArgs;
import com.pulumi.logdna.inputs.ArchiveS3ConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var config = new Archive("config", ArchiveArgs.builder()
.integration("s3")
.s3Config(ArchiveS3ConfigArgs.builder()
.bucket("example")
.build())
.build());
}
}
resources:
config:
type: logdna:Archive
properties:
integration: s3
s3Config:
bucket: example
Example Azure Blob Storage Archive
import * as pulumi from "@pulumi/pulumi";
import * as logdna from "@pulumi/logdna";
const config = new logdna.Archive("config", {
azblobConfig: {
accountkey: "example key",
accountname: "example name",
},
integration: "azblob",
});
import pulumi
import pulumi_logdna as logdna
config = logdna.Archive("config",
azblob_config={
"accountkey": "example key",
"accountname": "example name",
},
integration="azblob")
package main
import (
"github.com/pulumi/pulumi-terraform-provider/sdks/go/logdna/logdna"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := logdna.NewArchive(ctx, "config", &logdna.ArchiveArgs{
AzblobConfig: &logdna.ArchiveAzblobConfigArgs{
Accountkey: pulumi.String("example key"),
Accountname: pulumi.String("example name"),
},
Integration: pulumi.String("azblob"),
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Logdna = Pulumi.Logdna;
return await Deployment.RunAsync(() =>
{
var config = new Logdna.Archive("config", new()
{
AzblobConfig = new Logdna.Inputs.ArchiveAzblobConfigArgs
{
Accountkey = "example key",
Accountname = "example name",
},
Integration = "azblob",
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.logdna.Archive;
import com.pulumi.logdna.ArchiveArgs;
import com.pulumi.logdna.inputs.ArchiveAzblobConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var config = new Archive("config", ArchiveArgs.builder()
.azblobConfig(ArchiveAzblobConfigArgs.builder()
.accountkey("example key")
.accountname("example name")
.build())
.integration("azblob")
.build());
}
}
resources:
config:
type: logdna:Archive
properties:
azblobConfig:
accountkey: example key
accountname: example name
integration: azblob
Example Google Cloud Storage Archive
import * as pulumi from "@pulumi/pulumi";
import * as logdna from "@pulumi/logdna";
const config = new logdna.Archive("config", {
gcsConfig: {
bucket: "example",
projectid: "id",
},
integration: "gcs",
});
import pulumi
import pulumi_logdna as logdna
config = logdna.Archive("config",
gcs_config={
"bucket": "example",
"projectid": "id",
},
integration="gcs")
package main
import (
"github.com/pulumi/pulumi-terraform-provider/sdks/go/logdna/logdna"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := logdna.NewArchive(ctx, "config", &logdna.ArchiveArgs{
GcsConfig: &logdna.ArchiveGcsConfigArgs{
Bucket: pulumi.String("example"),
Projectid: pulumi.String("id"),
},
Integration: pulumi.String("gcs"),
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Logdna = Pulumi.Logdna;
return await Deployment.RunAsync(() =>
{
var config = new Logdna.Archive("config", new()
{
GcsConfig = new Logdna.Inputs.ArchiveGcsConfigArgs
{
Bucket = "example",
Projectid = "id",
},
Integration = "gcs",
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.logdna.Archive;
import com.pulumi.logdna.ArchiveArgs;
import com.pulumi.logdna.inputs.ArchiveGcsConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var config = new Archive("config", ArchiveArgs.builder()
.gcsConfig(ArchiveGcsConfigArgs.builder()
.bucket("example")
.projectid("id")
.build())
.integration("gcs")
.build());
}
}
resources:
config:
type: logdna:Archive
properties:
gcsConfig:
bucket: example
projectid: id
integration: gcs
Example DigitalOcean Spaces Archive
import * as pulumi from "@pulumi/pulumi";
import * as logdna from "@pulumi/logdna";
const config = new logdna.Archive("config", {
dosConfig: {
accesskey: "key",
endpoint: "example.com",
secretkey: "key",
space: "example",
},
integration: "dos",
});
import pulumi
import pulumi_logdna as logdna
config = logdna.Archive("config",
dos_config={
"accesskey": "key",
"endpoint": "example.com",
"secretkey": "key",
"space": "example",
},
integration="dos")
package main
import (
"github.com/pulumi/pulumi-terraform-provider/sdks/go/logdna/logdna"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := logdna.NewArchive(ctx, "config", &logdna.ArchiveArgs{
DosConfig: &logdna.ArchiveDosConfigArgs{
Accesskey: pulumi.String("key"),
Endpoint: pulumi.String("example.com"),
Secretkey: pulumi.String("key"),
Space: pulumi.String("example"),
},
Integration: pulumi.String("dos"),
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Logdna = Pulumi.Logdna;
return await Deployment.RunAsync(() =>
{
var config = new Logdna.Archive("config", new()
{
DosConfig = new Logdna.Inputs.ArchiveDosConfigArgs
{
Accesskey = "key",
Endpoint = "example.com",
Secretkey = "key",
Space = "example",
},
Integration = "dos",
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.logdna.Archive;
import com.pulumi.logdna.ArchiveArgs;
import com.pulumi.logdna.inputs.ArchiveDosConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var config = new Archive("config", ArchiveArgs.builder()
.dosConfig(ArchiveDosConfigArgs.builder()
.accesskey("key")
.endpoint("example.com")
.secretkey("key")
.space("example")
.build())
.integration("dos")
.build());
}
}
resources:
config:
type: logdna:Archive
properties:
dosConfig:
accesskey: key
endpoint: example.com
secretkey: key
space: example
integration: dos
Example OpenStack Swift Archive
import * as pulumi from "@pulumi/pulumi";
import * as logdna from "@pulumi/logdna";
const config = new logdna.Archive("config", {
integration: "swift",
swiftConfig: {
authurl: "example.com",
expires: 5,
password: "password",
tenantname: "example",
username: "example user",
},
});
import pulumi
import pulumi_logdna as logdna
config = logdna.Archive("config",
integration="swift",
swift_config={
"authurl": "example.com",
"expires": 5,
"password": "password",
"tenantname": "example",
"username": "example user",
})
package main
import (
"github.com/pulumi/pulumi-terraform-provider/sdks/go/logdna/logdna"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := logdna.NewArchive(ctx, "config", &logdna.ArchiveArgs{
Integration: pulumi.String("swift"),
SwiftConfig: &logdna.ArchiveSwiftConfigArgs{
Authurl: pulumi.String("example.com"),
Expires: pulumi.Float64(5),
Password: pulumi.String("password"),
Tenantname: pulumi.String("example"),
Username: pulumi.String("example user"),
},
})
if err != nil {
return err
}
return nil
})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Logdna = Pulumi.Logdna;
return await Deployment.RunAsync(() =>
{
var config = new Logdna.Archive("config", new()
{
Integration = "swift",
SwiftConfig = new Logdna.Inputs.ArchiveSwiftConfigArgs
{
Authurl = "example.com",
Expires = 5,
Password = "password",
Tenantname = "example",
Username = "example user",
},
});
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.logdna.Archive;
import com.pulumi.logdna.ArchiveArgs;
import com.pulumi.logdna.inputs.ArchiveSwiftConfigArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
var config = new Archive("config", ArchiveArgs.builder()
.integration("swift")
.swiftConfig(ArchiveSwiftConfigArgs.builder()
.authurl("example.com")
.expires(5)
.password("password")
.tenantname("example")
.username("example user")
.build())
.build());
}
}
resources:
config:
type: logdna:Archive
properties:
integration: swift
swiftConfig:
authurl: example.com
expires: 5
password: password
tenantname: example
username: example user
Create Archive Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new Archive(name: string, args: ArchiveArgs, opts?: CustomResourceOptions);
@overload
def Archive(resource_name: str,
args: ArchiveArgs,
opts: Optional[ResourceOptions] = None)
@overload
def Archive(resource_name: str,
opts: Optional[ResourceOptions] = None,
integration: Optional[str] = None,
archive_id: Optional[str] = None,
azblob_config: Optional[ArchiveAzblobConfigArgs] = None,
dos_config: Optional[ArchiveDosConfigArgs] = None,
gcs_config: Optional[ArchiveGcsConfigArgs] = None,
ibm_config: Optional[ArchiveIbmConfigArgs] = None,
s3_config: Optional[ArchiveS3ConfigArgs] = None,
swift_config: Optional[ArchiveSwiftConfigArgs] = None)
func NewArchive(ctx *Context, name string, args ArchiveArgs, opts ...ResourceOption) (*Archive, error)
public Archive(string name, ArchiveArgs args, CustomResourceOptions? opts = null)
public Archive(String name, ArchiveArgs args)
public Archive(String name, ArchiveArgs args, CustomResourceOptions options)
type: logdna:Archive
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args ArchiveArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args ArchiveArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args ArchiveArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args ArchiveArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args ArchiveArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var archiveResource = new Logdna.Archive("archiveResource", new()
{
Integration = "string",
ArchiveId = "string",
AzblobConfig = new Logdna.Inputs.ArchiveAzblobConfigArgs
{
Accountkey = "string",
Accountname = "string",
},
DosConfig = new Logdna.Inputs.ArchiveDosConfigArgs
{
Accesskey = "string",
Endpoint = "string",
Secretkey = "string",
Space = "string",
},
GcsConfig = new Logdna.Inputs.ArchiveGcsConfigArgs
{
Bucket = "string",
Projectid = "string",
},
IbmConfig = new Logdna.Inputs.ArchiveIbmConfigArgs
{
Apikey = "string",
Bucket = "string",
Endpoint = "string",
Resourceinstanceid = "string",
},
S3Config = new Logdna.Inputs.ArchiveS3ConfigArgs
{
Bucket = "string",
},
SwiftConfig = new Logdna.Inputs.ArchiveSwiftConfigArgs
{
Authurl = "string",
Password = "string",
Tenantname = "string",
Username = "string",
Expires = 0,
},
});
example, err := logdna.NewArchive(ctx, "archiveResource", &logdna.ArchiveArgs{
Integration: pulumi.String("string"),
ArchiveId: pulumi.String("string"),
AzblobConfig: &logdna.ArchiveAzblobConfigArgs{
Accountkey: pulumi.String("string"),
Accountname: pulumi.String("string"),
},
DosConfig: &logdna.ArchiveDosConfigArgs{
Accesskey: pulumi.String("string"),
Endpoint: pulumi.String("string"),
Secretkey: pulumi.String("string"),
Space: pulumi.String("string"),
},
GcsConfig: &logdna.ArchiveGcsConfigArgs{
Bucket: pulumi.String("string"),
Projectid: pulumi.String("string"),
},
IbmConfig: &logdna.ArchiveIbmConfigArgs{
Apikey: pulumi.String("string"),
Bucket: pulumi.String("string"),
Endpoint: pulumi.String("string"),
Resourceinstanceid: pulumi.String("string"),
},
S3Config: &logdna.ArchiveS3ConfigArgs{
Bucket: pulumi.String("string"),
},
SwiftConfig: &logdna.ArchiveSwiftConfigArgs{
Authurl: pulumi.String("string"),
Password: pulumi.String("string"),
Tenantname: pulumi.String("string"),
Username: pulumi.String("string"),
Expires: pulumi.Float64(0),
},
})
var archiveResource = new Archive("archiveResource", ArchiveArgs.builder()
.integration("string")
.archiveId("string")
.azblobConfig(ArchiveAzblobConfigArgs.builder()
.accountkey("string")
.accountname("string")
.build())
.dosConfig(ArchiveDosConfigArgs.builder()
.accesskey("string")
.endpoint("string")
.secretkey("string")
.space("string")
.build())
.gcsConfig(ArchiveGcsConfigArgs.builder()
.bucket("string")
.projectid("string")
.build())
.ibmConfig(ArchiveIbmConfigArgs.builder()
.apikey("string")
.bucket("string")
.endpoint("string")
.resourceinstanceid("string")
.build())
.s3Config(ArchiveS3ConfigArgs.builder()
.bucket("string")
.build())
.swiftConfig(ArchiveSwiftConfigArgs.builder()
.authurl("string")
.password("string")
.tenantname("string")
.username("string")
.expires(0)
.build())
.build());
archive_resource = logdna.Archive("archiveResource",
integration="string",
archive_id="string",
azblob_config={
"accountkey": "string",
"accountname": "string",
},
dos_config={
"accesskey": "string",
"endpoint": "string",
"secretkey": "string",
"space": "string",
},
gcs_config={
"bucket": "string",
"projectid": "string",
},
ibm_config={
"apikey": "string",
"bucket": "string",
"endpoint": "string",
"resourceinstanceid": "string",
},
s3_config={
"bucket": "string",
},
swift_config={
"authurl": "string",
"password": "string",
"tenantname": "string",
"username": "string",
"expires": 0,
})
const archiveResource = new logdna.Archive("archiveResource", {
integration: "string",
archiveId: "string",
azblobConfig: {
accountkey: "string",
accountname: "string",
},
dosConfig: {
accesskey: "string",
endpoint: "string",
secretkey: "string",
space: "string",
},
gcsConfig: {
bucket: "string",
projectid: "string",
},
ibmConfig: {
apikey: "string",
bucket: "string",
endpoint: "string",
resourceinstanceid: "string",
},
s3Config: {
bucket: "string",
},
swiftConfig: {
authurl: "string",
password: "string",
tenantname: "string",
username: "string",
expires: 0,
},
});
type: logdna:Archive
properties:
archiveId: string
azblobConfig:
accountkey: string
accountname: string
dosConfig:
accesskey: string
endpoint: string
secretkey: string
space: string
gcsConfig:
bucket: string
projectid: string
ibmConfig:
apikey: string
bucket: string
endpoint: string
resourceinstanceid: string
integration: string
s3Config:
bucket: string
swiftConfig:
authurl: string
expires: 0
password: string
tenantname: string
username: string
Archive Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
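For example, here is the IBM COS example above rewritten with an argument class instead of a dictionary literal; this is a minimal sketch that assumes the generated ArchiveIbmConfigArgs input class referenced in the constructor signature above, and all values are placeholders.
import pulumi_logdna as logdna
# Same configuration as the dictionary-literal IBM COS example,
# expressed with the generated ArchiveIbmConfigArgs input class.
config = logdna.Archive("config",
    integration="ibm",
    ibm_config=logdna.ArchiveIbmConfigArgs(
        apikey="key",
        bucket="example",
        endpoint="example.com",
        resourceinstanceid="id",
    ))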
The Archive resource accepts the following input properties:
- Integration string
- string (Required) Archiving integration. Valid values are ibm, s3, azblob, gcs, dos, swift.
- ArchiveId string
- AzblobConfig ArchiveAzblobConfig
- DosConfig ArchiveDosConfig
- GcsConfig ArchiveGcsConfig
- IbmConfig ArchiveIbmConfig
- S3Config ArchiveS3Config
- SwiftConfig ArchiveSwiftConfig
- Integration string
- string (Required) Archiving integration. Valid values are ibm, s3, azblob, gcs, dos, swift.
- ArchiveId string
- AzblobConfig ArchiveAzblobConfigArgs
- DosConfig ArchiveDosConfigArgs
- GcsConfig ArchiveGcsConfigArgs
- IbmConfig ArchiveIbmConfigArgs
- S3Config ArchiveS3ConfigArgs
- SwiftConfig ArchiveSwiftConfigArgs
- integration String
- string (Required) Archiving integration. Valid values are ibm, s3, azblob, gcs, dos, swift.
- archiveId String
- azblobConfig ArchiveAzblobConfig
- dosConfig ArchiveDosConfig
- gcsConfig ArchiveGcsConfig
- ibmConfig ArchiveIbmConfig
- s3Config ArchiveS3Config
- swiftConfig ArchiveSwiftConfig
- integration string
- string (Required) Archiving integration. Valid values are ibm, s3, azblob, gcs, dos, swift.
- archiveId string
- azblobConfig ArchiveAzblobConfig
- dosConfig ArchiveDosConfig
- gcsConfig ArchiveGcsConfig
- ibmConfig ArchiveIbmConfig
- s3Config ArchiveS3Config
- swiftConfig ArchiveSwiftConfig
- integration str
- string (Required) Archiving integration. Valid values are ibm, s3, azblob, gcs, dos, swift.
- archive_id str
- azblob_config ArchiveAzblobConfigArgs
- dos_config ArchiveDosConfigArgs
- gcs_config ArchiveGcsConfigArgs
- ibm_config ArchiveIbmConfigArgs
- s3_config ArchiveS3ConfigArgs
- swift_config ArchiveSwiftConfigArgs
- integration String
- string (Required) Archiving integration. Valid values are ibm, s3, azblob, gcs, dos, swift.
- archiveId String
- azblobConfig Property Map
- dosConfig Property Map
- gcsConfig Property Map
- ibmConfig Property Map
- s3Config Property Map
- swiftConfig Property Map
Outputs
All input properties are implicitly available as output properties. Additionally, the Archive resource produces the following output properties:
- Id string
- The provider-assigned unique ID for this managed resource.
- Id string
- The provider-assigned unique ID for this managed resource.
- id String
- The provider-assigned unique ID for this managed resource.
- id string
- The provider-assigned unique ID for this managed resource.
- id str
- The provider-assigned unique ID for this managed resource.
- id String
- The provider-assigned unique ID for this managed resource.
Look up Existing Archive Resource
Get an existing Archive resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: ArchiveState, opts?: CustomResourceOptions): Archive
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
archive_id: Optional[str] = None,
azblob_config: Optional[ArchiveAzblobConfigArgs] = None,
dos_config: Optional[ArchiveDosConfigArgs] = None,
gcs_config: Optional[ArchiveGcsConfigArgs] = None,
ibm_config: Optional[ArchiveIbmConfigArgs] = None,
integration: Optional[str] = None,
s3_config: Optional[ArchiveS3ConfigArgs] = None,
swift_config: Optional[ArchiveSwiftConfigArgs] = None) -> Archive
func GetArchive(ctx *Context, name string, id IDInput, state *ArchiveState, opts ...ResourceOption) (*Archive, error)
public static Archive Get(string name, Input<string> id, ArchiveState? state, CustomResourceOptions? opts = null)
public static Archive get(String name, Output<String> id, ArchiveState state, CustomResourceOptions options)
resources:
  _:
    type: logdna:Archive
    get:
      id: ${id}
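For instance, a minimal Python lookup of an existing archive configuration might look like the following; the resource ID value here is illustrative.
import pulumi_logdna as logdna
# Reference the existing archive configuration by its provider ID
# without creating or modifying it.
existing = logdna.Archive.get("existing-archive", "archive")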
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- ArchiveId string
- AzblobConfig ArchiveAzblobConfig
- DosConfig ArchiveDosConfig
- GcsConfig ArchiveGcsConfig
- IbmConfig ArchiveIbmConfig
- Integration string
- string (Required) Archiving integration. Valid values are ibm, s3, azblob, gcs, dos, swift.
- S3Config ArchiveS3Config
- SwiftConfig ArchiveSwiftConfig
- ArchiveId string
- AzblobConfig ArchiveAzblobConfigArgs
- DosConfig ArchiveDosConfigArgs
- GcsConfig ArchiveGcsConfigArgs
- IbmConfig ArchiveIbmConfigArgs
- Integration string
- string (Required) Archiving integration. Valid values are ibm, s3, azblob, gcs, dos, swift.
- S3Config ArchiveS3ConfigArgs
- SwiftConfig ArchiveSwiftConfigArgs
- archiveId String
- azblobConfig ArchiveAzblobConfig
- dosConfig ArchiveDosConfig
- gcsConfig ArchiveGcsConfig
- ibmConfig ArchiveIbmConfig
- integration String
- string (Required) Archiving integration. Valid values are ibm, s3, azblob, gcs, dos, swift.
- s3Config ArchiveS3Config
- swiftConfig ArchiveSwiftConfig
- archiveId string
- azblobConfig ArchiveAzblobConfig
- dosConfig ArchiveDosConfig
- gcsConfig ArchiveGcsConfig
- ibmConfig ArchiveIbmConfig
- integration string
- string (Required) Archiving integration. Valid values are ibm, s3, azblob, gcs, dos, swift.
- s3Config ArchiveS3Config
- swiftConfig ArchiveSwiftConfig
- archive_id str
- azblob_config ArchiveAzblobConfigArgs
- dos_config ArchiveDosConfigArgs
- gcs_config ArchiveGcsConfigArgs
- ibm_config ArchiveIbmConfigArgs
- integration str
- string (Required) Archiving integration. Valid values are ibm, s3, azblob, gcs, dos, swift.
- s3_config ArchiveS3ConfigArgs
- swift_config ArchiveSwiftConfigArgs
- archiveId String
- azblobConfig Property Map
- dosConfig Property Map
- gcsConfig Property Map
- ibmConfig Property Map
- integration String
- string (Required) Archiving integration. Valid values are ibm, s3, azblob, gcs, dos, swift.
- s3Config Property Map
- swiftConfig Property Map
Supporting Types
ArchiveAzblobConfig, ArchiveAzblobConfigArgs
- Accountkey string
- string (Required) Azure Blob Storage account access key
- Accountname string
- string (Required) Azure Blob Storage account name
- Accountkey string
- string (Required) Azure Blob Storage account access key
- Accountname string
- string (Required) Azure Blob Storage account name
- accountkey String
- string (Required) Azure Blob Storage account access key
- accountname String
- string (Required) Azure Blob Storage account name
- accountkey string
- string (Required) Azure Blob Storage account access key
- accountname string
- string (Required) Azure Blob Storage account name
- accountkey str
- string (Required) Azure Blob Storage account access key
- accountname str
- string (Required) Azure Blob Storage account name
- accountkey String
- string (Required) Azure Blob Storage account access key
- accountname String
- string (Required) Azure Blob Storage account name
ArchiveDosConfig, ArchiveDosConfigArgs
- Accesskey string
- Endpoint string
- Secretkey string
- Space string
ArchiveGcsConfig, ArchiveGcsConfigArgs
- Bucket string
- Projectid string
ArchiveIbmConfig, ArchiveIbmConfigArgs
- Apikey string
- string (Required) IBM COS API key
- Bucket string
- string (Required) IBM COS storage bucket name
- Endpoint string
- string (Required) IBM COS public (region) endpoint
- Resourceinstanceid string
- string (Required) IBM COS instance identifier
- Apikey string
- string (Required) IBM COS API key
- Bucket string
- string (Required) IBM COS storage bucket name
- Endpoint string
- string (Required) IBM COS public (region) endpoint
- Resourceinstanceid string
- string (Required) IBM COS instance identifier
- apikey String
- string (Required) IBM COS API key
- bucket String
- string (Required) IBM COS storage bucket name
- endpoint String
- string (Required) IBM COS public (region) endpoint
- resourceinstanceid String
- string (Required) IBM COS instance identifier
- apikey string
- string (Required) IBM COS API key
- bucket string
- string (Required) IBM COS storage bucket name
- endpoint string
- string (Required) IBM COS public (region) endpoint
- resourceinstanceid string
- string (Required) IBM COS instance identifier
- apikey str
- string (Required) IBM COS API key
- bucket str
- string (Required) IBM COS storage bucket name
- endpoint str
- string (Required) IBM COS public (region) endpoint
- resourceinstanceid str
- string (Required) IBM COS instance identifier
- apikey String
- string (Required) IBM COS API key
- bucket String
- string (Required) IBM COS storage bucket name
- endpoint String
- string (Required) IBM COS public (region) endpoint
- resourceinstanceid String
- string (Required) IBM COS instance identifier
ArchiveS3Config, ArchiveS3ConfigArgs
- Bucket string
- Bucket string
- bucket String
- bucket string
- bucket str
- bucket String
ArchiveSwiftConfig, ArchiveSwiftConfigArgs
- Authurl string
- string (Required) OpenStack Swift authentication URL
- Password string
- string (Required) OpenStack Swift user password
- Tenantname string
- string (Required) OpenStack Swift tenant/project/account name. Note that the provided settings must be valid; the connection to the archiving integration will be validated before the configuration can be saved.
- Username string
- string (Required) OpenStack Swift user name
- Expires double
- integer (Optional) OpenStack Swift storage object days till expiry
- Authurl string
- string (Required) OpenStack Swift authentication URL
- Password string
- string (Required) OpenStack Swift user password
- Tenantname string
- string (Required) OpenStack Swift tenant/project/account name. Note that the provided settings must be valid; the connection to the archiving integration will be validated before the configuration can be saved.
- Username string
- string (Required) OpenStack Swift user name
- Expires float64
- integer (Optional) OpenStack Swift storage object days till expiry
- authurl String
- string (Required) OpenStack Swift authentication URL
- password String
- string (Required) OpenStack Swift user password
- tenantname String
- string (Required) OpenStack Swift tenant/project/account name. Note that the provided settings must be valid; the connection to the archiving integration will be validated before the configuration can be saved.
- username String
- string (Required) OpenStack Swift user name
- expires Double
- integer (Optional) OpenStack Swift storage object days till expiry
- authurl string
- string (Required) OpenStack Swift authentication URL
- password string
- string (Required) OpenStack Swift user password
- tenantname string
- string (Required) OpenStack Swift tenant/project/account name. Note that the provided settings must be valid; the connection to the archiving integration will be validated before the configuration can be saved.
- username string
- string (Required) OpenStack Swift user name
- expires number
- integer (Optional) OpenStack Swift storage object days till expiry
- authurl str
- string (Required) OpenStack Swift authentication URL
- password str
- string (Required) OpenStack Swift user password
- tenantname str
- string (Required) OpenStack Swift tenant/project/account name. Note that the provided settings must be valid; the connection to the archiving integration will be validated before the configuration can be saved.
- username str
- string (Required) OpenStack Swift user name
- expires float
- integer (Optional) OpenStack Swift storage object days till expiry
- authurl String
- string (Required) OpenStack Swift authentication URL
- password String
- string (Required) OpenStack Swift user password
- tenantname String
- string (Required) OpenStack Swift tenant/project/account name. Note that the provided settings must be valid; the connection to the archiving integration will be validated before the configuration can be saved.
- username String
- string (Required) OpenStack Swift user name
- expires Number
- integer (Optional) OpenStack Swift storage object days till expiry
Import
Importing an existing configuration is supported:
$ pulumi import logdna:index/archive:Archive config archive
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- logdna logdna/terraform-provider-logdna
- License
- Notes
- This Pulumi package is based on the logdna Terraform Provider.