Cloudflare v6.1.2 published on Monday, Apr 28, 2025 by Pulumi
cloudflare.getLogpushDatasetJob
Example Usage
TypeScript
import * as pulumi from "@pulumi/pulumi";
import * as cloudflare from "@pulumi/cloudflare";
const exampleLogpushDatasetJob = cloudflare.getLogpushDatasetJob({
datasetId: "gateway_dns",
accountId: "account_id",
zoneId: "zone_id",
});
Python
import pulumi
import pulumi_cloudflare as cloudflare
example_logpush_dataset_job = cloudflare.get_logpush_dataset_job(dataset_id="gateway_dns",
account_id="account_id",
zone_id="zone_id")
Go
package main
import (
"github.com/pulumi/pulumi-cloudflare/sdk/v6/go/cloudflare"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
pulumi.Run(func(ctx *pulumi.Context) error {
_, err := cloudflare.GetLogpushDatasetJob(ctx, &cloudflare.GetLogpushDatasetJobArgs{
DatasetId: "gateway_dns",
AccountId: pulumi.StringRef("account_id"),
ZoneId: pulumi.StringRef("zone_id"),
}, nil)
if err != nil {
return err
}
return nil
})
}
C#
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Cloudflare = Pulumi.Cloudflare;
return await Deployment.RunAsync(() =>
{
var exampleLogpushDatasetJob = Cloudflare.GetLogpushDatasetJob.Invoke(new()
{
DatasetId = "gateway_dns",
AccountId = "account_id",
ZoneId = "zone_id",
});
});
Java
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.cloudflare.CloudflareFunctions;
import com.pulumi.cloudflare.inputs.GetLogpushDatasetJobArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
public static void main(String[] args) {
Pulumi.run(App::stack);
}
public static void stack(Context ctx) {
final var exampleLogpushDatasetJob = CloudflareFunctions.getLogpushDatasetJob(GetLogpushDatasetJobArgs.builder()
.datasetId("gateway_dns")
.accountId("account_id")
.zoneId("zone_id")
.build());
}
}
YAML
variables:
exampleLogpushDatasetJob:
fn::invoke:
function: cloudflare:getLogpushDatasetJob
arguments:
datasetId: gateway_dns
accountId: account_id
zoneId: zone_id
Using getLogpushDatasetJob
Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.
function getLogpushDatasetJob(args: GetLogpushDatasetJobArgs, opts?: InvokeOptions): Promise<GetLogpushDatasetJobResult>
function getLogpushDatasetJobOutput(args: GetLogpushDatasetJobOutputArgs, opts?: InvokeOptions): Output<GetLogpushDatasetJobResult>
def get_logpush_dataset_job(account_id: Optional[str] = None,
dataset_id: Optional[str] = None,
zone_id: Optional[str] = None,
opts: Optional[InvokeOptions] = None) -> GetLogpushDatasetJobResult
def get_logpush_dataset_job_output(account_id: Optional[pulumi.Input[str]] = None,
dataset_id: Optional[pulumi.Input[str]] = None,
zone_id: Optional[pulumi.Input[str]] = None,
opts: Optional[InvokeOptions] = None) -> Output[GetLogpushDatasetJobResult]
func GetLogpushDatasetJob(ctx *Context, args *GetLogpushDatasetJobArgs, opts ...InvokeOption) (*GetLogpushDatasetJobResult, error)
func GetLogpushDatasetJobOutput(ctx *Context, args *GetLogpushDatasetJobOutputArgs, opts ...InvokeOption) GetLogpushDatasetJobResultOutput
> Note: This function is named GetLogpushDatasetJob in the Go SDK.
public static class GetLogpushDatasetJob
{
public static Task<GetLogpushDatasetJobResult> InvokeAsync(GetLogpushDatasetJobArgs args, InvokeOptions? opts = null)
public static Output<GetLogpushDatasetJobResult> Invoke(GetLogpushDatasetJobInvokeArgs args, InvokeOptions? opts = null)
}
public static CompletableFuture<GetLogpushDatasetJobResult> getLogpushDatasetJob(GetLogpushDatasetJobArgs args, InvokeOptions options)
public static Output<GetLogpushDatasetJobResult> getLogpushDatasetJob(GetLogpushDatasetJobArgs args, InvokeOptions options)
fn::invoke:
function: cloudflare:index/getLogpushDatasetJob:getLogpushDatasetJob
arguments:
# arguments dictionary
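For instance, a minimal TypeScript sketch contrasting the two forms (the "account_id" value is a placeholder):

import * as pulumi from "@pulumi/pulumi";
import * as cloudflare from "@pulumi/cloudflare";

// Direct form: returns a Promise of the plain result.
const direct = cloudflare.getLogpushDatasetJob({
    datasetId: "gateway_dns",
    accountId: "account_id", // placeholder
});
export const jobIdDirect = direct.then(job => job.id);

// Output form: accepts Input-wrapped arguments (for example, values flowing
// from other resources) and returns an Output-wrapped result.
const viaOutput = cloudflare.getLogpushDatasetJobOutput({
    datasetId: "gateway_dns",
    accountId: pulumi.output("account_id"), // placeholder; could come from another resource
});
export const jobIdOutput = viaOutput.id; // lifted property access on the Output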
The following arguments are supported:
C#
- DatasetId string - Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- AccountId string - The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- ZoneId string - The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
Go
- DatasetId string - Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- AccountId string - The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- ZoneId string - The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
Java
- datasetId String - Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- accountId String - The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- zoneId String - The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
TypeScript
- datasetId string - Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- accountId string - The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- zoneId string - The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
Python
- dataset_id str - Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- account_id str - The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- zone_id str - The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
YAML
- datasetId String - Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- accountId String - The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- zoneId String - The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
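Because the Account ID and Zone ID are mutually exclusive, pass exactly one of them per lookup. A short TypeScript sketch, assuming placeholder IDs and that gateway_dns is an account-scoped dataset while http_requests is zone-scoped:

import * as cloudflare from "@pulumi/cloudflare";

// Account-scoped lookup (e.g. a Zero Trust dataset such as gateway_dns).
const accountScoped = cloudflare.getLogpushDatasetJob({
    datasetId: "gateway_dns",
    accountId: "account_id", // placeholder account ID
});

// Zone-scoped lookup (e.g. the http_requests dataset for a single zone).
const zoneScoped = cloudflare.getLogpushDatasetJob({
    datasetId: "http_requests",
    zoneId: "zone_id", // placeholder zone ID
});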
getLogpushDatasetJob Result
The following output properties are available:
C#
- Dataset string - Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- DatasetId string - Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- DestinationConf string - Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
- Enabled bool - Flag that indicates if the job is enabled.
- ErrorMessage string - If not null, the job is currently failing. Failures are usually repetitive (example: no permissions to write to destination bucket). Only the last failure is recorded. On successful execution of a job the error_message and last_error are set to null.
- Frequency string - This field is deprecated. Please use max_upload_* parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low".
- Id int - Unique id of the job.
- Kind string - The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs. Currently, Edge Log Delivery is only supported for the http_requests dataset. Available values: "edge".
- LastComplete string - Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. If the job has never run or has just been enabled and hasn't run yet then the field will be empty.
- LastError string - Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field.
- LogpullOptions string - This field is deprecated. Use output_options instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately.
- MaxUploadBytes int - The maximum uncompressed file size of a batch of logs. This setting value must be between 5 MB and 1 GB, or 0 to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. This parameter is not available for jobs with edge as its kind.
- MaxUploadIntervalSeconds int - The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or 0 to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. This parameter is only used for jobs with edge as its kind.
- MaxUploadRecords int - The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or 0 to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. This parameter is not available for jobs with edge as its kind.
- Name string - Optional human readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
- OutputOptions GetLogpushDatasetJobOutputOptions - The structured replacement for logpull_options. When including this field, the logpull_option field will be ignored.
- AccountId string - The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- ZoneId string - The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
Go
- Dataset string - Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- DatasetId string - Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- DestinationConf string - Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
- Enabled bool - Flag that indicates if the job is enabled.
- ErrorMessage string - If not null, the job is currently failing. Failures are usually repetitive (example: no permissions to write to destination bucket). Only the last failure is recorded. On successful execution of a job the error_message and last_error are set to null.
- Frequency string - This field is deprecated. Please use max_upload_* parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low".
- Id int - Unique id of the job.
- Kind string - The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs. Currently, Edge Log Delivery is only supported for the http_requests dataset. Available values: "edge".
- LastComplete string - Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. If the job has never run or has just been enabled and hasn't run yet then the field will be empty.
- LastError string - Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field.
- LogpullOptions string - This field is deprecated. Use output_options instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately.
- MaxUploadBytes int - The maximum uncompressed file size of a batch of logs. This setting value must be between 5 MB and 1 GB, or 0 to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. This parameter is not available for jobs with edge as its kind.
- MaxUploadIntervalSeconds int - The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or 0 to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. This parameter is only used for jobs with edge as its kind.
- MaxUploadRecords int - The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or 0 to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. This parameter is not available for jobs with edge as its kind.
- Name string - Optional human readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
- OutputOptions GetLogpushDatasetJobOutputOptions - The structured replacement for logpull_options. When including this field, the logpull_option field will be ignored.
- AccountId string - The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- ZoneId string - The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
Java
- dataset String - Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- datasetId String - Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- destinationConf String - Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
- enabled Boolean - Flag that indicates if the job is enabled.
- errorMessage String - If not null, the job is currently failing. Failures are usually repetitive (example: no permissions to write to destination bucket). Only the last failure is recorded. On successful execution of a job the error_message and last_error are set to null.
- frequency String - This field is deprecated. Please use max_upload_* parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low".
- id Integer - Unique id of the job.
- kind String - The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs. Currently, Edge Log Delivery is only supported for the http_requests dataset. Available values: "edge".
- lastComplete String - Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. If the job has never run or has just been enabled and hasn't run yet then the field will be empty.
- lastError String - Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field.
- logpullOptions String - This field is deprecated. Use output_options instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately.
- maxUploadBytes Integer - The maximum uncompressed file size of a batch of logs. This setting value must be between 5 MB and 1 GB, or 0 to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. This parameter is not available for jobs with edge as its kind.
- maxUploadIntervalSeconds Integer - The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or 0 to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. This parameter is only used for jobs with edge as its kind.
- maxUploadRecords Integer - The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or 0 to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. This parameter is not available for jobs with edge as its kind.
- name String - Optional human readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
- outputOptions GetLogpushDatasetJobOutputOptions - The structured replacement for logpull_options. When including this field, the logpull_option field will be ignored.
- accountId String - The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- zoneId String - The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
TypeScript
- dataset string - Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- datasetId string - Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- destinationConf string - Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
- enabled boolean - Flag that indicates if the job is enabled.
- errorMessage string - If not null, the job is currently failing. Failures are usually repetitive (example: no permissions to write to destination bucket). Only the last failure is recorded. On successful execution of a job the error_message and last_error are set to null.
- frequency string - This field is deprecated. Please use max_upload_* parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low".
- id number - Unique id of the job.
- kind string - The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs. Currently, Edge Log Delivery is only supported for the http_requests dataset. Available values: "edge".
- lastComplete string - Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. If the job has never run or has just been enabled and hasn't run yet then the field will be empty.
- lastError string - Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field.
- logpullOptions string - This field is deprecated. Use output_options instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately.
- maxUploadBytes number - The maximum uncompressed file size of a batch of logs. This setting value must be between 5 MB and 1 GB, or 0 to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. This parameter is not available for jobs with edge as its kind.
- maxUploadIntervalSeconds number - The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or 0 to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. This parameter is only used for jobs with edge as its kind.
- maxUploadRecords number - The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or 0 to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. This parameter is not available for jobs with edge as its kind.
- name string - Optional human readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
- outputOptions GetLogpushDatasetJobOutputOptions - The structured replacement for logpull_options. When including this field, the logpull_option field will be ignored.
- accountId string - The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- zoneId string - The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
Python
- dataset str - Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- dataset_id str - Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- destination_conf str - Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
- enabled bool - Flag that indicates if the job is enabled.
- error_message str - If not null, the job is currently failing. Failures are usually repetitive (example: no permissions to write to destination bucket). Only the last failure is recorded. On successful execution of a job the error_message and last_error are set to null.
- frequency str - This field is deprecated. Please use max_upload_* parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low".
- id int - Unique id of the job.
- kind str - The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs. Currently, Edge Log Delivery is only supported for the http_requests dataset. Available values: "edge".
- last_complete str - Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. If the job has never run or has just been enabled and hasn't run yet then the field will be empty.
- last_error str - Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field.
- logpull_options str - This field is deprecated. Use output_options instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately.
- max_upload_bytes int - The maximum uncompressed file size of a batch of logs. This setting value must be between 5 MB and 1 GB, or 0 to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. This parameter is not available for jobs with edge as its kind.
- max_upload_interval_seconds int - The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or 0 to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. This parameter is only used for jobs with edge as its kind.
- max_upload_records int - The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or 0 to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. This parameter is not available for jobs with edge as its kind.
- name str - Optional human readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
- output_options GetLogpushDatasetJobOutputOptions - The structured replacement for logpull_options. When including this field, the logpull_option field will be ignored.
- account_id str - The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- zone_id str - The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
YAML
- dataset String - Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- datasetId String - Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- destinationConf String - Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
- enabled Boolean - Flag that indicates if the job is enabled.
- errorMessage String - If not null, the job is currently failing. Failures are usually repetitive (example: no permissions to write to destination bucket). Only the last failure is recorded. On successful execution of a job the error_message and last_error are set to null.
- frequency String - This field is deprecated. Please use max_upload_* parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low".
- id Number - Unique id of the job.
- kind String - The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs. Currently, Edge Log Delivery is only supported for the http_requests dataset. Available values: "edge".
- lastComplete String - Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. If the job has never run or has just been enabled and hasn't run yet then the field will be empty.
- lastError String - Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field.
- logpullOptions String - This field is deprecated. Use output_options instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately.
- maxUploadBytes Number - The maximum uncompressed file size of a batch of logs. This setting value must be between 5 MB and 1 GB, or 0 to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. This parameter is not available for jobs with edge as its kind.
- maxUploadIntervalSeconds Number - The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or 0 to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. This parameter is only used for jobs with edge as its kind.
- maxUploadRecords Number - The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or 0 to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. This parameter is not available for jobs with edge as its kind.
- name String - Optional human readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
- outputOptions Property Map - The structured replacement for logpull_options. When including this field, the logpull_option field will be ignored.
- accountId String - The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- zoneId String - The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
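A short TypeScript sketch of consuming these outputs, for example to surface job health (the account ID is a placeholder):

import * as cloudflare from "@pulumi/cloudflare";

const job = cloudflare.getLogpushDatasetJobOutput({
    datasetId: "gateway_dns",
    accountId: "account_id", // placeholder
});

// lastError and errorMessage stay null while the job is healthy;
// lastComplete records the end of the most recently pushed log range.
export const jobEnabled = job.enabled;
export const jobLastComplete = job.lastComplete;
export const jobLastError = job.lastError;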
Supporting Types
GetLogpushDatasetJobOutputOptions
C#
- BatchPrefix string - String to be prepended before each batch.
- BatchSuffix string - String to be appended after each batch.
- Cve202144228 bool - If set to true, will cause all occurrences of ${ in the generated files to be replaced with x{.
- FieldDelimiter string - String to join fields. This field will be ignored when record_template is set.
- FieldNames List<string> - List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the field names you are interested in.
- OutputType string - Specifies the output type, such as ndjson or csv. This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, are different between output types. Available values: "ndjson", "csv".
- RecordDelimiter string - String to be inserted in-between the records as separator.
- RecordPrefix string - String to be prepended before each record.
- RecordSuffix string - String to be appended after each record.
- RecordTemplate string - String to use as template for each record instead of the default json key value mapping. All fields used in the template must be present in field_names as well, otherwise they will end up as null. Format as a Go text/template without any standard functions, like conditionals, loops, sub-templates, etc.
- SampleRate double - Floating number to specify sampling rate. Sampling is applied on top of filtering, and regardless of the current sample_interval of the data.
- TimestampFormat string - String to specify the format for timestamps, such as unixnano, unix, or rfc3339. Available values: "unixnano", "unix", "rfc3339".
Go
- BatchPrefix string - String to be prepended before each batch.
- BatchSuffix string - String to be appended after each batch.
- Cve202144228 bool - If set to true, will cause all occurrences of ${ in the generated files to be replaced with x{.
- FieldDelimiter string - String to join fields. This field will be ignored when record_template is set.
- FieldNames []string - List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the field names you are interested in.
- OutputType string - Specifies the output type, such as ndjson or csv. This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, are different between output types. Available values: "ndjson", "csv".
- RecordDelimiter string - String to be inserted in-between the records as separator.
- RecordPrefix string - String to be prepended before each record.
- RecordSuffix string - String to be appended after each record.
- RecordTemplate string - String to use as template for each record instead of the default json key value mapping. All fields used in the template must be present in field_names as well, otherwise they will end up as null. Format as a Go text/template without any standard functions, like conditionals, loops, sub-templates, etc.
- SampleRate float64 - Floating number to specify sampling rate. Sampling is applied on top of filtering, and regardless of the current sample_interval of the data.
- TimestampFormat string - String to specify the format for timestamps, such as unixnano, unix, or rfc3339. Available values: "unixnano", "unix", "rfc3339".
Java
- batchPrefix String - String to be prepended before each batch.
- batchSuffix String - String to be appended after each batch.
- cve202144228 Boolean - If set to true, will cause all occurrences of ${ in the generated files to be replaced with x{.
- fieldDelimiter String - String to join fields. This field will be ignored when record_template is set.
- fieldNames List<String> - List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the field names you are interested in.
- outputType String - Specifies the output type, such as ndjson or csv. This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, are different between output types. Available values: "ndjson", "csv".
- recordDelimiter String - String to be inserted in-between the records as separator.
- recordPrefix String - String to be prepended before each record.
- recordSuffix String - String to be appended after each record.
- recordTemplate String - String to use as template for each record instead of the default json key value mapping. All fields used in the template must be present in field_names as well, otherwise they will end up as null. Format as a Go text/template without any standard functions, like conditionals, loops, sub-templates, etc.
- sampleRate Double - Floating number to specify sampling rate. Sampling is applied on top of filtering, and regardless of the current sample_interval of the data.
- timestampFormat String - String to specify the format for timestamps, such as unixnano, unix, or rfc3339. Available values: "unixnano", "unix", "rfc3339".
TypeScript
- batchPrefix string - String to be prepended before each batch.
- batchSuffix string - String to be appended after each batch.
- cve202144228 boolean - If set to true, will cause all occurrences of ${ in the generated files to be replaced with x{.
- fieldDelimiter string - String to join fields. This field will be ignored when record_template is set.
- fieldNames string[] - List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the field names you are interested in.
- outputType string - Specifies the output type, such as ndjson or csv. This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, are different between output types. Available values: "ndjson", "csv".
- recordDelimiter string - String to be inserted in-between the records as separator.
- recordPrefix string - String to be prepended before each record.
- recordSuffix string - String to be appended after each record.
- recordTemplate string - String to use as template for each record instead of the default json key value mapping. All fields used in the template must be present in field_names as well, otherwise they will end up as null. Format as a Go text/template without any standard functions, like conditionals, loops, sub-templates, etc.
- sampleRate number - Floating number to specify sampling rate. Sampling is applied on top of filtering, and regardless of the current sample_interval of the data.
- timestampFormat string - String to specify the format for timestamps, such as unixnano, unix, or rfc3339. Available values: "unixnano", "unix", "rfc3339".
Python
- batch_prefix str - String to be prepended before each batch.
- batch_suffix str - String to be appended after each batch.
- cve202144228 bool - If set to true, will cause all occurrences of ${ in the generated files to be replaced with x{.
- field_delimiter str - String to join fields. This field will be ignored when record_template is set.
- field_names Sequence[str] - List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the field names you are interested in.
- output_type str - Specifies the output type, such as ndjson or csv. This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, are different between output types. Available values: "ndjson", "csv".
- record_delimiter str - String to be inserted in-between the records as separator.
- record_prefix str - String to be prepended before each record.
- record_suffix str - String to be appended after each record.
- record_template str - String to use as template for each record instead of the default json key value mapping. All fields used in the template must be present in field_names as well, otherwise they will end up as null. Format as a Go text/template without any standard functions, like conditionals, loops, sub-templates, etc.
- sample_rate float - Floating number to specify sampling rate. Sampling is applied on top of filtering, and regardless of the current sample_interval of the data.
- timestamp_format str - String to specify the format for timestamps, such as unixnano, unix, or rfc3339. Available values: "unixnano", "unix", "rfc3339".
YAML
- batchPrefix String - String to be prepended before each batch.
- batchSuffix String - String to be appended after each batch.
- cve202144228 Boolean - If set to true, will cause all occurrences of ${ in the generated files to be replaced with x{.
- fieldDelimiter String - String to join fields. This field will be ignored when record_template is set.
- fieldNames List<String> - List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the field names you are interested in.
- outputType String - Specifies the output type, such as ndjson or csv. This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, are different between output types. Available values: "ndjson", "csv".
- recordDelimiter String - String to be inserted in-between the records as separator.
- recordPrefix String - String to be prepended before each record.
- recordSuffix String - String to be appended after each record.
- recordTemplate String - String to use as template for each record instead of the default json key value mapping. All fields used in the template must be present in field_names as well, otherwise they will end up as null. Format as a Go text/template without any standard functions, like conditionals, loops, sub-templates, etc.
- sampleRate Number - Floating number to specify sampling rate. Sampling is applied on top of filtering, and regardless of the current sample_interval of the data.
- timestampFormat String - String to specify the format for timestamps, such as unixnano, unix, or rfc3339. Available values: "unixnano", "unix", "rfc3339".
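A hedged TypeScript sketch reading these structured output options off a fetched job (the zone ID is a placeholder; recall from above that every field referenced in record_template must also appear in field_names):

import * as cloudflare from "@pulumi/cloudflare";

const job = cloudflare.getLogpushDatasetJobOutput({
    datasetId: "http_requests",
    zoneId: "zone_id", // placeholder
});

// Nested lifted property access on the Output-wrapped result.
export const logFields = job.outputOptions.fieldNames;
export const logFormat = job.outputOptions.outputType; // "ndjson" or "csv"
export const logSampleRate = job.outputOptions.sampleRate;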
Package Details
- Repository: Cloudflare pulumi/pulumi-cloudflare
- License: Apache-2.0
- Notes: This Pulumi package is based on the cloudflare Terraform Provider.