Cloudflare v6.10.1 published on Wednesday, Oct 22, 2025 by Pulumi
cloudflare.getLogpushDatasetJob
Example Usage
import * as pulumi from "@pulumi/pulumi";
import * as cloudflare from "@pulumi/cloudflare";
const exampleLogpushDatasetJob = cloudflare.getLogpushDatasetJob({
    datasetId: "gateway_dns",
    accountId: "account_id",
    zoneId: "zone_id",
});
import pulumi
import pulumi_cloudflare as cloudflare
example_logpush_dataset_job = cloudflare.get_logpush_dataset_job(dataset_id="gateway_dns",
    account_id="account_id",
    zone_id="zone_id")
package main
import (
	"github.com/pulumi/pulumi-cloudflare/sdk/v6/go/cloudflare"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := cloudflare.GetLogpushDatasetJob(ctx, &cloudflare.GetLogpushDatasetJobArgs{
			DatasetId: pulumi.StringRef("gateway_dns"),
			AccountId: pulumi.StringRef("account_id"),
			ZoneId:    pulumi.StringRef("zone_id"),
		}, nil)
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Cloudflare = Pulumi.Cloudflare;
return await Deployment.RunAsync(() => 
{
    var exampleLogpushDatasetJob = Cloudflare.GetLogpushDatasetJob.Invoke(new()
    {
        DatasetId = "gateway_dns",
        AccountId = "account_id",
        ZoneId = "zone_id",
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.cloudflare.CloudflareFunctions;
import com.pulumi.cloudflare.inputs.GetLogpushDatasetJobArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        final var exampleLogpushDatasetJob = CloudflareFunctions.getLogpushDatasetJob(GetLogpushDatasetJobArgs.builder()
            .datasetId("gateway_dns")
            .accountId("account_id")
            .zoneId("zone_id")
            .build());
    }
}
variables:
  exampleLogpushDatasetJob:
    fn::invoke:
      function: cloudflare:getLogpushDatasetJob
      arguments:
        datasetId: gateway_dns
        accountId: account_id
        zoneId: zone_id
Using getLogpushDatasetJob
Two invocation forms are available. The direct form accepts plain arguments and either blocks until the result value is available, or returns a Promise-wrapped result. The output form accepts Input-wrapped arguments and returns an Output-wrapped result.
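For instance, the two forms look like this in TypeScript (a minimal sketch using the placeholder IDs from the example above):

import * as cloudflare from "@pulumi/cloudflare";

// Direct form: plain arguments; the result arrives as a Promise.
const direct = cloudflare.getLogpushDatasetJob({
    datasetId: "gateway_dns",
    accountId: "account_id",
});

// Output form: accepts Input-wrapped arguments (for example, outputs of
// other resources) and returns an Output-wrapped result.
const viaOutput = cloudflare.getLogpushDatasetJobOutput({
    datasetId: "gateway_dns",
    accountId: "account_id",
});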
function getLogpushDatasetJob(args: GetLogpushDatasetJobArgs, opts?: InvokeOptions): Promise<GetLogpushDatasetJobResult>
function getLogpushDatasetJobOutput(args: GetLogpushDatasetJobOutputArgs, opts?: InvokeOptions): Output<GetLogpushDatasetJobResult>
def get_logpush_dataset_job(account_id: Optional[str] = None,
                            dataset_id: Optional[str] = None,
                            zone_id: Optional[str] = None,
                            opts: Optional[InvokeOptions] = None) -> GetLogpushDatasetJobResult
def get_logpush_dataset_job_output(account_id: Optional[pulumi.Input[str]] = None,
                            dataset_id: Optional[pulumi.Input[str]] = None,
                            zone_id: Optional[pulumi.Input[str]] = None,
                            opts: Optional[InvokeOptions] = None) -> Output[GetLogpushDatasetJobResult]
func GetLogpushDatasetJob(ctx *Context, args *GetLogpushDatasetJobArgs, opts ...InvokeOption) (*GetLogpushDatasetJobResult, error)
func GetLogpushDatasetJobOutput(ctx *Context, args *GetLogpushDatasetJobOutputArgs, opts ...InvokeOption) GetLogpushDatasetJobResultOutput
Note: This function is named GetLogpushDatasetJob in the Go SDK.
public static class GetLogpushDatasetJob 
{
    public static Task<GetLogpushDatasetJobResult> InvokeAsync(GetLogpushDatasetJobArgs args, InvokeOptions? opts = null)
    public static Output<GetLogpushDatasetJobResult> Invoke(GetLogpushDatasetJobInvokeArgs args, InvokeOptions? opts = null)
}
public static CompletableFuture<GetLogpushDatasetJobResult> getLogpushDatasetJob(GetLogpushDatasetJobArgs args, InvokeOptions options)
public static Output<GetLogpushDatasetJobResult> getLogpushDatasetJob(GetLogpushDatasetJobArgs args, InvokeOptions options)
fn::invoke:
  function: cloudflare:index/getLogpushDatasetJob:getLogpushDatasetJob
  arguments:
    # arguments dictionary
The following arguments are supported:
- AccountId string
- The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- DatasetId string
- Name of the dataset. A list of supported datasets can be found on the Developer Docs. Available values: "access_requests", "audit_logs", "audit_logs_v2", "biso_user_actions", "casb_findings", "device_posture_results", "dlp_forensic_copies", "dns_firewall_logs", "dns_logs", "email_security_alerts", "firewall_events", "gateway_dns", "gateway_http", "gateway_network", "http_requests", "magic_ids_detections", "nel_reports", "network_analytics_logs", "page_shield_events", "sinkhole_http_logs", "spectrum_events", "ssh_logs", "workers_trace_events", "zaraz_events", "zero_trust_network_sessions".
- ZoneId string
- The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
- AccountId string
- The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- DatasetId string
- Name of the dataset. A list of supported datasets can be found on the Developer Docs. Available values: "access_requests", "audit_logs", "audit_logs_v2", "biso_user_actions", "casb_findings", "device_posture_results", "dlp_forensic_copies", "dns_firewall_logs", "dns_logs", "email_security_alerts", "firewall_events", "gateway_dns", "gateway_http", "gateway_network", "http_requests", "magic_ids_detections", "nel_reports", "network_analytics_logs", "page_shield_events", "sinkhole_http_logs", "spectrum_events", "ssh_logs", "workers_trace_events", "zaraz_events", "zero_trust_network_sessions".
- ZoneId string
- The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
- accountId String
- The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- datasetId String
- Name of the dataset. A list of supported datasets can be found on the Developer Docs. Available values: "access_requests", "audit_logs", "audit_logs_v2", "biso_user_actions", "casb_findings", "device_posture_results", "dlp_forensic_copies", "dns_firewall_logs", "dns_logs", "email_security_alerts", "firewall_events", "gateway_dns", "gateway_http", "gateway_network", "http_requests", "magic_ids_detections", "nel_reports", "network_analytics_logs", "page_shield_events", "sinkhole_http_logs", "spectrum_events", "ssh_logs", "workers_trace_events", "zaraz_events", "zero_trust_network_sessions".
- zoneId String
- The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
- accountId string
- The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- datasetId string
- Name of the dataset. A list of supported datasets can be found on the Developer Docs. Available values: "access_requests", "audit_logs", "audit_logs_v2", "biso_user_actions", "casb_findings", "device_posture_results", "dlp_forensic_copies", "dns_firewall_logs", "dns_logs", "email_security_alerts", "firewall_events", "gateway_dns", "gateway_http", "gateway_network", "http_requests", "magic_ids_detections", "nel_reports", "network_analytics_logs", "page_shield_events", "sinkhole_http_logs", "spectrum_events", "ssh_logs", "workers_trace_events", "zaraz_events", "zero_trust_network_sessions".
- zoneId string
- The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
- account_id str
- The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- dataset_id str
- Name of the dataset. A list of supported datasets can be found on the Developer Docs. Available values: "access_requests", "audit_logs", "audit_logs_v2", "biso_user_actions", "casb_findings", "device_posture_results", "dlp_forensic_copies", "dns_firewall_logs", "dns_logs", "email_security_alerts", "firewall_events", "gateway_dns", "gateway_http", "gateway_network", "http_requests", "magic_ids_detections", "nel_reports", "network_analytics_logs", "page_shield_events", "sinkhole_http_logs", "spectrum_events", "ssh_logs", "workers_trace_events", "zaraz_events", "zero_trust_network_sessions".
- zone_id str
- The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
- accountId String
- The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- datasetId String
- Name of the dataset. A list of supported datasets can be found on the Developer Docs. Available values: "access_requests", "audit_logs", "audit_logs_v2", "biso_user_actions", "casb_findings", "device_posture_results", "dlp_forensic_copies", "dns_firewall_logs", "dns_logs", "email_security_alerts", "firewall_events", "gateway_dns", "gateway_http", "gateway_network", "http_requests", "magic_ids_detections", "nel_reports", "network_analytics_logs", "page_shield_events", "sinkhole_http_logs", "spectrum_events", "ssh_logs", "workers_trace_events", "zaraz_events", "zero_trust_network_sessions".
- zoneId String
- The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
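Because the Account ID and Zone ID are mutually exclusive, pass exactly one of them per lookup. A minimal TypeScript sketch with placeholder IDs (the generated examples above show both placeholders together only for illustration):

import * as cloudflare from "@pulumi/cloudflare";

// Account-scoped lookup: only accountId is set.
const accountScoped = cloudflare.getLogpushDatasetJob({
    datasetId: "gateway_dns",
    accountId: "account_id",
});

// Zone-scoped lookup: only zoneId is set.
const zoneScoped = cloudflare.getLogpushDatasetJob({
    datasetId: "http_requests",
    zoneId: "zone_id",
});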
getLogpushDatasetJob Result
The following output properties are available:
- Dataset string
- Name of the dataset. A list of supported datasets can be found on the Developer Docs. Available values: "access_requests", "audit_logs", "audit_logs_v2", "biso_user_actions", "casb_findings", "device_posture_results", "dlp_forensic_copies", "dns_firewall_logs", "dns_logs", "email_security_alerts", "firewall_events", "gateway_dns", "gateway_http", "gateway_network", "http_requests", "magic_ids_detections", "nel_reports", "network_analytics_logs", "page_shield_events", "sinkhole_http_logs", "spectrum_events", "ssh_logs", "workers_trace_events", "zaraz_events", "zero_trust_network_sessions".
- DatasetId string
- Name of the dataset. A list of supported datasets can be found on the Developer Docs. Available values: "access_requests", "audit_logs", "audit_logs_v2", "biso_user_actions", "casb_findings", "device_posture_results", "dlp_forensic_copies", "dns_firewall_logs", "dns_logs", "email_security_alerts", "firewall_events", "gateway_dns", "gateway_http", "gateway_network", "http_requests", "magic_ids_detections", "nel_reports", "network_analytics_logs", "page_shield_events", "sinkhole_http_logs", "spectrum_events", "ssh_logs", "workers_trace_events", "zaraz_events", "zero_trust_network_sessions".
- DestinationConf string
- Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
- Enabled bool
- Flag that indicates if the job is enabled.
- ErrorMessage string
- If not null, the job is currently failing. Failures are usually repetitive (example: no permissions to write to destination bucket). Only the last failure is recorded. On successful execution of a job the error_message and last_error are set to null.
- Frequency string
- This field is deprecated. Please use max_upload_* parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low".
- Id int
- Unique id of the job.
- Kind string
- The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs (when supported by the dataset). Available values: "", "edge".
- LastComplete string
- Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. If the job has never run or has just been enabled and hasn't run yet then the field will be empty.
- LastError string
- Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field.
- LogpullOptions string
- This field is deprecated. Use output_options instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately.
- MaxUploadBytes int
- The maximum uncompressed file size of a batch of logs. This setting value must be between 5 MB and 1 GB, or 0 to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size.
- MaxUploadIntervalSeconds int
- The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or 0 to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this.
- MaxUploadRecords int
- The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or 0 to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this.
- Name string
- Optional human readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
- OutputOptions GetLogpushDatasetJobOutputOptions
- The structured replacement for logpull_options. When including this field, the logpull_options field will be ignored.
- AccountId string
- The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- ZoneId string
- The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
- Dataset string
- Name of the dataset. A list of supported datasets can be found on the Developer Docs. Available values: "access_requests", "audit_logs", "audit_logs_v2", "biso_user_actions", "casb_findings", "device_posture_results", "dlp_forensic_copies", "dns_firewall_logs", "dns_logs", "email_security_alerts", "firewall_events", "gateway_dns", "gateway_http", "gateway_network", "http_requests", "magic_ids_detections", "nel_reports", "network_analytics_logs", "page_shield_events", "sinkhole_http_logs", "spectrum_events", "ssh_logs", "workers_trace_events", "zaraz_events", "zero_trust_network_sessions".
- DatasetId string
- Name of the dataset. A list of supported datasets can be found on the Developer Docs. Available values: "access_requests", "audit_logs", "audit_logs_v2", "biso_user_actions", "casb_findings", "device_posture_results", "dlp_forensic_copies", "dns_firewall_logs", "dns_logs", "email_security_alerts", "firewall_events", "gateway_dns", "gateway_http", "gateway_network", "http_requests", "magic_ids_detections", "nel_reports", "network_analytics_logs", "page_shield_events", "sinkhole_http_logs", "spectrum_events", "ssh_logs", "workers_trace_events", "zaraz_events", "zero_trust_network_sessions".
- DestinationConf string
- Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
- Enabled bool
- Flag that indicates if the job is enabled.
- ErrorMessage string
- If not null, the job is currently failing. Failures are usually repetitive (example: no permissions to write to destination bucket). Only the last failure is recorded. On successful execution of a job the error_message and last_error are set to null.
- Frequency string
- This field is deprecated. Please use max_upload_* parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low".
- Id int
- Unique id of the job.
- Kind string
- The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs (when supported by the dataset). Available values: "", "edge".
- LastComplete string
- Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. If the job has never run or has just been enabled and hasn't run yet then the field will be empty.
- LastError string
- Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field.
- LogpullOptions string
- This field is deprecated. Use output_options instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately.
- MaxUploadBytes int
- The maximum uncompressed file size of a batch of logs. This setting value must be between 5 MB and 1 GB, or 0 to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size.
- MaxUploadIntervalSeconds int
- The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or 0 to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this.
- MaxUploadRecords int
- The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or 0 to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this.
- Name string
- Optional human readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
- OutputOptions GetLogpushDatasetJobOutputOptions
- The structured replacement for logpull_options. When including this field, the logpull_options field will be ignored.
- AccountId string
- The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- ZoneId string
- The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
- dataset String
- Name of the dataset. A list of supported datasets can be found on the Developer Docs. Available values: "access_requests", "audit_logs", "audit_logs_v2", "biso_user_actions", "casb_findings", "device_posture_results", "dlp_forensic_copies", "dns_firewall_logs", "dns_logs", "email_security_alerts", "firewall_events", "gateway_dns", "gateway_http", "gateway_network", "http_requests", "magic_ids_detections", "nel_reports", "network_analytics_logs", "page_shield_events", "sinkhole_http_logs", "spectrum_events", "ssh_logs", "workers_trace_events", "zaraz_events", "zero_trust_network_sessions".
- datasetId String
- Name of the dataset. A list of supported datasets can be found on the Developer Docs. Available values: "access_requests", "audit_logs", "audit_logs_v2", "biso_user_actions", "casb_findings", "device_posture_results", "dlp_forensic_copies", "dns_firewall_logs", "dns_logs", "email_security_alerts", "firewall_events", "gateway_dns", "gateway_http", "gateway_network", "http_requests", "magic_ids_detections", "nel_reports", "network_analytics_logs", "page_shield_events", "sinkhole_http_logs", "spectrum_events", "ssh_logs", "workers_trace_events", "zaraz_events", "zero_trust_network_sessions".
- destinationConf String
- Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
- enabled Boolean
- Flag that indicates if the job is enabled.
- errorMessage String
- If not null, the job is currently failing. Failures are usually repetitive (example: no permissions to write to destination bucket). Only the last failure is recorded. On successful execution of a job the error_message and last_error are set to null.
- frequency String
- This field is deprecated. Please use max_upload_* parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low".
- id Integer
- Unique id of the job.
- kind String
- The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs (when supported by the dataset). Available values: "", "edge".
- lastComplete String
- Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. If the job has never run or has just been enabled and hasn't run yet then the field will be empty.
- lastError String
- Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field.
- logpullOptions String
- This field is deprecated. Use output_options instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately.
- maxUploadBytes Integer
- The maximum uncompressed file size of a batch of logs. This setting value must be between 5 MB and 1 GB, or 0 to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size.
- maxUploadIntervalSeconds Integer
- The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or 0 to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this.
- maxUploadRecords Integer
- The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or 0 to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this.
- name String
- Optional human readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
- outputOptions GetLogpushDatasetJobOutputOptions
- The structured replacement for logpull_options. When including this field, the logpull_options field will be ignored.
- accountId String
- The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- zoneId String
- The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
- dataset string
- Name of the dataset. A list of supported datasets can be found on the Developer Docs. Available values: "access_requests", "audit_logs", "audit_logs_v2", "biso_user_actions", "casb_findings", "device_posture_results", "dlp_forensic_copies", "dns_firewall_logs", "dns_logs", "email_security_alerts", "firewall_events", "gateway_dns", "gateway_http", "gateway_network", "http_requests", "magic_ids_detections", "nel_reports", "network_analytics_logs", "page_shield_events", "sinkhole_http_logs", "spectrum_events", "ssh_logs", "workers_trace_events", "zaraz_events", "zero_trust_network_sessions".
- datasetId string
- Name of the dataset. A list of supported datasets can be found on the Developer Docs. Available values: "access_requests", "audit_logs", "audit_logs_v2", "biso_user_actions", "casb_findings", "device_posture_results", "dlp_forensic_copies", "dns_firewall_logs", "dns_logs", "email_security_alerts", "firewall_events", "gateway_dns", "gateway_http", "gateway_network", "http_requests", "magic_ids_detections", "nel_reports", "network_analytics_logs", "page_shield_events", "sinkhole_http_logs", "spectrum_events", "ssh_logs", "workers_trace_events", "zaraz_events", "zero_trust_network_sessions".
- destinationConf string
- Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
- enabled boolean
- Flag that indicates if the job is enabled.
- errorMessage string
- If not null, the job is currently failing. Failures are usually repetitive (example: no permissions to write to destination bucket). Only the last failure is recorded. On successful execution of a job the error_message and last_error are set to null.
- frequency string
- This field is deprecated. Please use max_upload_* parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low".
- id number
- Unique id of the job.
- kind string
- The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs (when supported by the dataset). Available values: "", "edge".
- lastComplete string
- Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. If the job has never run or has just been enabled and hasn't run yet then the field will be empty.
- lastError string
- Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field.
- logpullOptions string
- This field is deprecated. Use output_options instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately.
- maxUploadBytes number
- The maximum uncompressed file size of a batch of logs. This setting value must be between 5 MB and 1 GB, or 0 to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size.
- maxUploadIntervalSeconds number
- The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or 0 to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this.
- maxUploadRecords number
- The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or 0 to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this.
- name string
- Optional human readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
- outputOptions GetLogpushDatasetJobOutputOptions
- The structured replacement for logpull_options. When including this field, the logpull_options field will be ignored.
- accountId string
- The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- zoneId string
- The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
- dataset str
- Name of the dataset. A list of supported datasets can be found on the Developer Docs. Available values: "access_requests", "audit_logs", "audit_logs_v2", "biso_user_actions", "casb_findings", "device_posture_results", "dlp_forensic_copies", "dns_firewall_logs", "dns_logs", "email_security_alerts", "firewall_events", "gateway_dns", "gateway_http", "gateway_network", "http_requests", "magic_ids_detections", "nel_reports", "network_analytics_logs", "page_shield_events", "sinkhole_http_logs", "spectrum_events", "ssh_logs", "workers_trace_events", "zaraz_events", "zero_trust_network_sessions".
- dataset_id str
- Name of the dataset. A list of supported datasets can be found on the Developer Docs. Available values: "access_requests", "audit_logs", "audit_logs_v2", "biso_user_actions", "casb_findings", "device_posture_results", "dlp_forensic_copies", "dns_firewall_logs", "dns_logs", "email_security_alerts", "firewall_events", "gateway_dns", "gateway_http", "gateway_network", "http_requests", "magic_ids_detections", "nel_reports", "network_analytics_logs", "page_shield_events", "sinkhole_http_logs", "spectrum_events", "ssh_logs", "workers_trace_events", "zaraz_events", "zero_trust_network_sessions".
- destination_conf str
- Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
- enabled bool
- Flag that indicates if the job is enabled.
- error_message str
- If not null, the job is currently failing. Failures are usually repetitive (example: no permissions to write to destination bucket). Only the last failure is recorded. On successful execution of a job the error_message and last_error are set to null.
- frequency str
- This field is deprecated. Please use max_upload_* parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low".
- id int
- Unique id of the job.
- kind str
- The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs (when supported by the dataset). Available values: "", "edge".
- last_complete str
- Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. If the job has never run or has just been enabled and hasn't run yet then the field will be empty.
- last_error str
- Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field.
- logpull_options str
- This field is deprecated. Use output_options instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately.
- max_upload_bytes int
- The maximum uncompressed file size of a batch of logs. This setting value must be between 5 MB and 1 GB, or 0 to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size.
- max_upload_interval_seconds int
- The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or 0 to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this.
- max_upload_records int
- The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or 0 to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this.
- name str
- Optional human readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
- output_options GetLogpushDatasetJobOutputOptions
- The structured replacement for logpull_options. When including this field, the logpull_options field will be ignored.
- account_id str
- The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- zone_id str
- The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
- dataset String
- Name of the dataset. A list of supported datasets can be found on the Developer Docs. Available values: "access_requests", "audit_logs", "audit_logs_v2", "biso_user_actions", "casb_findings", "device_posture_results", "dlp_forensic_copies", "dns_firewall_logs", "dns_logs", "email_security_alerts", "firewall_events", "gateway_dns", "gateway_http", "gateway_network", "http_requests", "magic_ids_detections", "nel_reports", "network_analytics_logs", "page_shield_events", "sinkhole_http_logs", "spectrum_events", "ssh_logs", "workers_trace_events", "zaraz_events", "zero_trust_network_sessions".
- datasetId String
- Name of the dataset. A list of supported datasets can be found on the Developer Docs. Available values: "access_requests", "audit_logs", "audit_logs_v2", "biso_user_actions", "casb_findings", "device_posture_results", "dlp_forensic_copies", "dns_firewall_logs", "dns_logs", "email_security_alerts", "firewall_events", "gateway_dns", "gateway_http", "gateway_network", "http_requests", "magic_ids_detections", "nel_reports", "network_analytics_logs", "page_shield_events", "sinkhole_http_logs", "spectrum_events", "ssh_logs", "workers_trace_events", "zaraz_events", "zero_trust_network_sessions".
- destinationConf String
- Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
- enabled Boolean
- Flag that indicates if the job is enabled.
- errorMessage String
- If not null, the job is currently failing. Failures are usually repetitive (example: no permissions to write to destination bucket). Only the last failure is recorded. On successful execution of a job the error_message and last_error are set to null.
- frequency String
- This field is deprecated. Please use max_upload_* parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low".
- id Number
- Unique id of the job.
- kind String
- The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs (when supported by the dataset). Available values: "", "edge".
- lastComplete String
- Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. If the job has never run or has just been enabled and hasn't run yet then the field will be empty.
- lastError String
- Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field.
- logpullOptions String
- This field is deprecated. Use output_options instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately.
- maxUploadBytes Number
- The maximum uncompressed file size of a batch of logs. This setting value must be between 5 MB and 1 GB, or 0 to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size.
- maxUploadIntervalSeconds Number
- The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or 0 to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this.
- maxUploadRecords Number
- The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or 0 to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this.
- name String
- Optional human readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
- outputOptions Property Map
- The structured replacement for logpull_options. When including this field, the logpull_options field will be ignored.
- accountId String
- The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- zoneId String
- The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
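A short TypeScript sketch of consuming these output properties (placeholder IDs; the actual values depend on the job configured in your account):

import * as cloudflare from "@pulumi/cloudflare";

const job = cloudflare.getLogpushDatasetJobOutput({
    datasetId: "gateway_dns",
    accountId: "account_id",
});

// Export a few of the properties listed above.
export const jobId = job.id;
export const jobEnabled = job.enabled;
export const jobDestination = job.destinationConf;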
Supporting Types
GetLogpushDatasetJobOutputOptions     
- BatchPrefix string
- String to be prepended before each batch.
- BatchSuffix string
- String to be appended after each batch.
- Cve202144228 bool
- If set to true, will cause all occurrences of ${ in the generated files to be replaced with x{.
- FieldDelimiter string
- String to join fields. This field will be ignored when record_template is set.
- FieldNames List<string>
- List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the field names you are interested in.
- OutputType string
- Specifies the output type, such as ndjson or csv. This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, are different between output types. Available values: "ndjson", "csv".
- RecordDelimiter string
- String to be inserted in-between the records as separator.
- RecordPrefix string
- String to be prepended before each record.
- RecordSuffix string
- String to be appended after each record.
- RecordTemplate string
- String to use as template for each record instead of the default json key value mapping. All fields used in the template must be present in field_names as well, otherwise they will end up as null. Format as a Go text/template without any standard functions, like conditionals, loops, sub-templates, etc.
- SampleRate double
- Floating number to specify sampling rate. Sampling is applied on top of filtering, and regardless of the current sample_interval of the data.
- TimestampFormat string
- String to specify the format for timestamps, such as unixnano, unix, or rfc3339. Available values: "unixnano", "unix", "rfc3339".
- BatchPrefix string
- String to be prepended before each batch.
- BatchSuffix string
- String to be appended after each batch.
- Cve202144228 bool
- If set to true, will cause all occurrences of ${ in the generated files to be replaced with x{.
- FieldDelimiter string
- String to join fields. This field will be ignored when record_template is set.
- FieldNames []string
- List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the field names you are interested in.
- OutputType string
- Specifies the output type, such as ndjson or csv. This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, are different between output types. Available values: "ndjson", "csv".
- RecordDelimiter string
- String to be inserted in-between the records as separator.
- RecordPrefix string
- String to be prepended before each record.
- RecordSuffix string
- String to be appended after each record.
- RecordTemplate string
- String to use as template for each record instead of the default json key value mapping. All fields used in the template must be present in field_names as well, otherwise they will end up as null. Format as a Go text/template without any standard functions, like conditionals, loops, sub-templates, etc.
- SampleRate float64
- Floating number to specify sampling rate. Sampling is applied on top of filtering, and regardless of the current sample_interval of the data.
- TimestampFormat string
- String to specify the format for timestamps, such as unixnano, unix, or rfc3339. Available values: "unixnano", "unix", "rfc3339".
- batchPrefix String
- String to be prepended before each batch.
- batchSuffix String
- String to be appended after each batch.
- cve202144228 Boolean
- If set to true, will cause all occurrences of ${ in the generated files to be replaced with x{.
- fieldDelimiter String
- String to join fields. This field will be ignored when record_template is set.
- fieldNames List<String>
- List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the field names you are interested in.
- outputType String
- Specifies the output type, such as ndjson or csv. This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, are different between output types. Available values: "ndjson", "csv".
- recordDelimiter String
- String to be inserted in-between the records as separator.
- recordPrefix String
- String to be prepended before each record.
- recordSuffix String
- String to be appended after each record.
- recordTemplate String
- String to use as template for each record instead of the default json key value mapping. All fields used in the template must be present in field_names as well, otherwise they will end up as null. Format as a Go text/template without any standard functions, like conditionals, loops, sub-templates, etc.
- sampleRate Double
- Floating number to specify sampling rate. Sampling is applied on top of filtering, and regardless of the current sample_interval of the data.
- timestampFormat String
- String to specify the format for timestamps, such as unixnano, unix, or rfc3339. Available values: "unixnano", "unix", "rfc3339".
- batchPrefix string
- String to be prepended before each batch.
- batchSuffix string
- String to be appended after each batch.
- cve202144228 boolean
- If set to true, will cause all occurrences of ${ in the generated files to be replaced with x{.
- fieldDelimiter string
- String to join fields. This field will be ignored when record_template is set.
- fieldNames string[]
- List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the field names you are interested in.
- outputType string
- Specifies the output type, such as ndjson or csv. This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, are different between output types. Available values: "ndjson", "csv".
- recordDelimiter string
- String to be inserted in-between the records as separator.
- recordPrefix string
- String to be prepended before each record.
- recordSuffix string
- String to be appended after each record.
- recordTemplate string
- String to use as template for each record instead of the default json key value mapping. All fields used in the template must be present in field_names as well, otherwise they will end up as null. Format as a Go text/template without any standard functions, like conditionals, loops, sub-templates, etc.
- sampleRate number
- Floating number to specify sampling rate. Sampling is applied on top of filtering, and regardless of the current sample_interval of the data.
- timestampFormat string
- String to specify the format for timestamps, such as unixnano, unix, or rfc3339. Available values: "unixnano", "unix", "rfc3339".
- batch_prefix str
- String to be prepended before each batch.
- batch_suffix str
- String to be appended after each batch.
- cve202144228 bool
- If set to true, will cause all occurrences of ${ in the generated files to be replaced with x{.
- field_delimiter str
- String to join fields. This field will be ignored when record_template is set.
- field_names Sequence[str]
- List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the field names you are interested in.
- output_type str
- Specifies the output type, such as ndjson or csv. This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, are different between output types. Available values: "ndjson", "csv".
- record_delimiter str
- String to be inserted in-between the records as separator.
- record_prefix str
- String to be prepended before each record.
- record_suffix str
- String to be appended after each record.
- record_template str
- String to use as template for each record instead of the default json key value mapping. All fields used in the template must be present in field_names as well, otherwise they will end up as null. Format as a Go text/template without any standard functions, like conditionals, loops, sub-templates, etc.
- sample_rate float
- Floating number to specify sampling rate. Sampling is applied on top of filtering, and regardless of the current sample_interval of the data.
- timestamp_format str
- String to specify the format for timestamps, such as unixnano, unix, or rfc3339. Available values: "unixnano", "unix", "rfc3339".
- batchPrefix String
- String to be prepended before each batch.
- batchSuffix String
- String to be appended after each batch.
- cve202144228 Boolean
- If set to true, will cause all occurrences of ${ in the generated files to be replaced with x{.
- fieldDelimiter String
- String to join fields. This field will be ignored when record_template is set.
- fieldNames List<String>
- List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the field names you are interested in.
- outputType String
- Specifies the output type, such as ndjson or csv. This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, are different between output types. Available values: "ndjson", "csv".
- recordDelimiter String
- String to be inserted in-between the records as separator.
- recordPrefix String
- String to be prepended before each record.
- recordSuffix String
- String to be appended after each record.
- recordTemplate String
- String to use as template for each record instead of the default json key value mapping. All fields used in the template must be present in field_names as well, otherwise they will end up as null. Format as a Go text/template without any standard functions, like conditionals, loops, sub-templates, etc.
- sampleRate Number
- Floating number to specify sampling rate. Sampling is applied on top of filtering, and regardless of the current sample_interval of the data.
- timestampFormat String
- String to specify the format for timestamps, such as unixnano, unix, or rfc3339. Available values: "unixnano", "unix", "rfc3339".
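As a sketch, the nested output options can be read through the Output-wrapped result in TypeScript, since property access is lifted across Outputs (placeholder IDs; fields are empty if the job does not set them):

import * as cloudflare from "@pulumi/cloudflare";

const job = cloudflare.getLogpushDatasetJobOutput({
    datasetId: "gateway_dns",
    accountId: "account_id",
});

// Nested fields of outputOptions are reachable directly on the Output.
export const outputType = job.outputOptions.outputType; // "ndjson" or "csv"
export const fieldNames = job.outputOptions.fieldNames;
export const sampleRate = job.outputOptions.sampleRate;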
Package Details
- Repository
- Cloudflare pulumi/pulumi-cloudflare
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the cloudflare Terraform Provider.
