cloudflare.LogpushJob
Explore with Pulumi AI
Example Usage
Coming soon!
Coming soon!
Coming soon!
Coming soon!
Coming soon!
resources:
exampleLogpushJob:
type: cloudflare:LogpushJob
name: example_logpush_job
properties:
destinationConf: s3://mybucket/logs?region=us-west-2
zoneId: zone_id
dataset: http_requests
enabled: false
frequency: high
kind: edge
logpullOptions: fields=RayID,ClientIP,EdgeStartTimestamp&timestamps=rfc3339
maxUploadBytes: 5e+06
maxUploadIntervalSeconds: 30
maxUploadRecords: 1000
name: example.com
outputOptions:
batch_prefix: batch_prefix
batch_suffix: batch_suffix
cve_2021_44228: true
field_delimiter: field_delimiter
field_names:
- ClientIP
- EdgeStartTimestamp
- RayID
output_type: ndjson
record_delimiter: record_delimiter
record_prefix: record_prefix
record_suffix: record_suffix
record_template: record_template
sample_rate: 0
timestamp_format: unixnano
ownershipChallenge: '00000000000000000000'
Create LogpushJob Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new LogpushJob(name: string, args: LogpushJobArgs, opts?: CustomResourceOptions);
@overload
def LogpushJob(resource_name: str,
args: LogpushJobArgs,
opts: Optional[ResourceOptions] = None)
@overload
def LogpushJob(resource_name: str,
opts: Optional[ResourceOptions] = None,
destination_conf: Optional[str] = None,
logpull_options: Optional[str] = None,
dataset: Optional[str] = None,
enabled: Optional[bool] = None,
frequency: Optional[str] = None,
kind: Optional[str] = None,
account_id: Optional[str] = None,
max_upload_bytes: Optional[int] = None,
max_upload_interval_seconds: Optional[int] = None,
max_upload_records: Optional[int] = None,
name: Optional[str] = None,
output_options: Optional[LogpushJobOutputOptionsArgs] = None,
ownership_challenge: Optional[str] = None,
zone_id: Optional[str] = None)
func NewLogpushJob(ctx *Context, name string, args LogpushJobArgs, opts ...ResourceOption) (*LogpushJob, error)
public LogpushJob(string name, LogpushJobArgs args, CustomResourceOptions? opts = null)
public LogpushJob(String name, LogpushJobArgs args)
public LogpushJob(String name, LogpushJobArgs args, CustomResourceOptions options)
type: cloudflare:LogpushJob
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args LogpushJobArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args LogpushJobArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args LogpushJobArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args LogpushJobArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args LogpushJobArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var logpushJobResource = new Cloudflare.LogpushJob("logpushJobResource", new()
{
DestinationConf = "string",
Dataset = "string",
Enabled = false,
Kind = "string",
AccountId = "string",
MaxUploadBytes = 0,
MaxUploadIntervalSeconds = 0,
MaxUploadRecords = 0,
Name = "string",
OutputOptions = new Cloudflare.Inputs.LogpushJobOutputOptionsArgs
{
BatchPrefix = "string",
BatchSuffix = "string",
Cve202144228 = false,
FieldDelimiter = "string",
FieldNames = new[]
{
"string",
},
OutputType = "string",
RecordDelimiter = "string",
RecordPrefix = "string",
RecordSuffix = "string",
RecordTemplate = "string",
SampleRate = 0,
TimestampFormat = "string",
},
OwnershipChallenge = "string",
ZoneId = "string",
});
example, err := cloudflare.NewLogpushJob(ctx, "logpushJobResource", &cloudflare.LogpushJobArgs{
DestinationConf: pulumi.String("string"),
Dataset: pulumi.String("string"),
Enabled: pulumi.Bool(false),
Kind: pulumi.String("string"),
AccountId: pulumi.String("string"),
MaxUploadBytes: pulumi.Int(0),
MaxUploadIntervalSeconds: pulumi.Int(0),
MaxUploadRecords: pulumi.Int(0),
Name: pulumi.String("string"),
OutputOptions: &cloudflare.LogpushJobOutputOptionsArgs{
BatchPrefix: pulumi.String("string"),
BatchSuffix: pulumi.String("string"),
Cve202144228: pulumi.Bool(false),
FieldDelimiter: pulumi.String("string"),
FieldNames: pulumi.StringArray{
pulumi.String("string"),
},
OutputType: pulumi.String("string"),
RecordDelimiter: pulumi.String("string"),
RecordPrefix: pulumi.String("string"),
RecordSuffix: pulumi.String("string"),
RecordTemplate: pulumi.String("string"),
SampleRate: pulumi.Float64(0),
TimestampFormat: pulumi.String("string"),
},
OwnershipChallenge: pulumi.String("string"),
ZoneId: pulumi.String("string"),
})
var logpushJobResource = new LogpushJob("logpushJobResource", LogpushJobArgs.builder()
.destinationConf("string")
.dataset("string")
.enabled(false)
.kind("string")
.accountId("string")
.maxUploadBytes(0)
.maxUploadIntervalSeconds(0)
.maxUploadRecords(0)
.name("string")
.outputOptions(LogpushJobOutputOptionsArgs.builder()
.batchPrefix("string")
.batchSuffix("string")
.cve202144228(false)
.fieldDelimiter("string")
.fieldNames("string")
.outputType("string")
.recordDelimiter("string")
.recordPrefix("string")
.recordSuffix("string")
.recordTemplate("string")
.sampleRate(0)
.timestampFormat("string")
.build())
.ownershipChallenge("string")
.zoneId("string")
.build());
logpush_job_resource = cloudflare.LogpushJob("logpushJobResource",
destination_conf="string",
dataset="string",
enabled=False,
kind="string",
account_id="string",
max_upload_bytes=0,
max_upload_interval_seconds=0,
max_upload_records=0,
name="string",
output_options={
"batch_prefix": "string",
"batch_suffix": "string",
"cve202144228": False,
"field_delimiter": "string",
"field_names": ["string"],
"output_type": "string",
"record_delimiter": "string",
"record_prefix": "string",
"record_suffix": "string",
"record_template": "string",
"sample_rate": 0,
"timestamp_format": "string",
},
ownership_challenge="string",
zone_id="string")
const logpushJobResource = new cloudflare.LogpushJob("logpushJobResource", {
destinationConf: "string",
dataset: "string",
enabled: false,
kind: "string",
accountId: "string",
maxUploadBytes: 0,
maxUploadIntervalSeconds: 0,
maxUploadRecords: 0,
name: "string",
outputOptions: {
batchPrefix: "string",
batchSuffix: "string",
cve202144228: false,
fieldDelimiter: "string",
fieldNames: ["string"],
outputType: "string",
recordDelimiter: "string",
recordPrefix: "string",
recordSuffix: "string",
recordTemplate: "string",
sampleRate: 0,
timestampFormat: "string",
},
ownershipChallenge: "string",
zoneId: "string",
});
type: cloudflare:LogpushJob
properties:
accountId: string
dataset: string
destinationConf: string
enabled: false
kind: string
maxUploadBytes: 0
maxUploadIntervalSeconds: 0
maxUploadRecords: 0
name: string
outputOptions:
batchPrefix: string
batchSuffix: string
cve202144228: false
fieldDelimiter: string
fieldNames:
- string
outputType: string
recordDelimiter: string
recordPrefix: string
recordSuffix: string
recordTemplate: string
sampleRate: 0
timestampFormat: string
ownershipChallenge: string
zoneId: string
LogpushJob Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The LogpushJob resource accepts the following input properties:
- Destination
Conf string - Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
- Account
Id string - The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- Dataset string
- Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- Enabled bool
- Flag that indicates if the job is enabled.
- Frequency string
- This field is deprecated. Please use
max_upload_*
parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low". - Kind string
- The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs. Currently, Edge Log Delivery is only supported for the
http_requests
dataset. Available values: "edge". - Logpull
Options string - This field is deprecated. Use
output_options
instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately. - Max
Upload Bytes int - The maximum uncompressed file size of a batch of logs. This setting value must be between 5 MB and 1 GB, or 0 to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. This parameter is not available for jobs with edge as its kind. - Max
Upload Interval Seconds int - The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or 0 to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. This parameter is only used for jobs with edge as its kind. - Max
Upload Records int - The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or 0 to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. This parameter is not available for jobs with edge as its kind. - Name string
- Optional human readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
- Output
Options LogpushJob Output Options - The structured replacement for
logpull_options
. When including this field, the logpull_options
field will be ignored. - Ownership
Challenge string - Ownership challenge token to prove destination ownership.
- Zone
Id string - The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
- Destination
Conf string - Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
- Account
Id string - The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- Dataset string
- Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- Enabled bool
- Flag that indicates if the job is enabled.
- Frequency string
- This field is deprecated. Please use
max_upload_*
parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low". - Kind string
- The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs. Currently, Edge Log Delivery is only supported for the
http_requests
dataset. Available values: "edge". - Logpull
Options string - This field is deprecated. Use
output_options
instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately. - Max
Upload Bytes int - The maximum uncompressed file size of a batch of logs. This setting value must be between 5 MB and 1 GB, or 0 to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. This parameter is not available for jobs with edge as its kind. - Max
Upload Interval Seconds int - The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or 0 to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. This parameter is only used for jobs with edge as its kind. - Max
Upload Records int - The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or 0 to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. This parameter is not available for jobs with edge as its kind. - Name string
- Optional human readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
- Output
Options LogpushJob Output Options Args - The structured replacement for
logpull_options
. When including this field, the logpull_options
field will be ignored. - Ownership
Challenge string - Ownership challenge token to prove destination ownership.
- Zone
Id string - The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
- destination
Conf String - Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
- account
Id String - The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- dataset String
- Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- enabled Boolean
- Flag that indicates if the job is enabled.
- frequency String
- This field is deprecated. Please use
max_upload_*
parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low". - kind String
- The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs. Currently, Edge Log Delivery is only supported for the
http_requests
dataset. Available values: "edge". - logpull
Options String - This field is deprecated. Use
output_options
instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately. - max
Upload Bytes Integer - The maximum uncompressed file size of a batch of logs. This setting value must be between 5 MB and 1 GB, or 0 to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. This parameter is not available for jobs with edge as its kind. - max
Upload Interval Seconds Integer - The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or 0 to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. This parameter is only used for jobs with edge as its kind. - max
Upload Records Integer - The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or 0 to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. This parameter is not available for jobs with edge as its kind. - name String
- Optional human readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
- output
Options LogpushJob Output Options - The structured replacement for
logpull_options
. When including this field, the logpull_options
field will be ignored. - ownership
Challenge String - Ownership challenge token to prove destination ownership.
- zone
Id String - The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
- destination
Conf string - Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
- account
Id string - The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- dataset string
- Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- enabled boolean
- Flag that indicates if the job is enabled.
- frequency string
- This field is deprecated. Please use
max_upload_*
parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low". - kind string
- The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs. Currently, Edge Log Delivery is only supported for the
http_requests
dataset. Available values: "edge". - logpull
Options string - This field is deprecated. Use
output_options
instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately. - max
Upload Bytes number - The maximum uncompressed file size of a batch of logs. This setting value must be between 5 MB and 1 GB, or 0 to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. This parameter is not available for jobs with edge as its kind. - max
Upload Interval Seconds number - The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or 0 to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. This parameter is only used for jobs with edge as its kind. - max
Upload Records number - The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or 0 to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. This parameter is not available for jobs with edge as its kind. - name string
- Optional human readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
- output
Options LogpushJob Output Options - The structured replacement for
logpull_options
. When including this field, the logpull_options
field will be ignored. - ownership
Challenge string - Ownership challenge token to prove destination ownership.
- zone
Id string - The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
- destination_
conf str - Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
- account_
id str - The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- dataset str
- Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- enabled bool
- Flag that indicates if the job is enabled.
- frequency str
- This field is deprecated. Please use
max_upload_*
parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low". - kind str
- The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs. Currently, Edge Log Delivery is only supported for the
http_requests
dataset. Available values: "edge". - logpull_
options str - This field is deprecated. Use
output_options
instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately. - max_
upload_ bytes int - The maximum uncompressed file size of a batch of logs. This setting value must be between 5 MB and 1 GB, or 0 to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. This parameter is not available for jobs with edge as its kind. - max_
upload_ interval_ seconds int - The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or 0 to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. This parameter is only used for jobs with edge as its kind. - max_
upload_ records int - The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or 0 to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. This parameter is not available for jobs with edge as its kind. - name str
- Optional human readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
- output_
options LogpushJob Output Options Args - The structured replacement for
logpull_options
. When including this field, the logpull_options
field will be ignored. - ownership_
challenge str - Ownership challenge token to prove destination ownership.
- zone_
id str - The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
- destination
Conf String - Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
- account
Id String - The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- dataset String
- Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- enabled Boolean
- Flag that indicates if the job is enabled.
- frequency String
- This field is deprecated. Please use
max_upload_*
parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low". - kind String
- The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs. Currently, Edge Log Delivery is only supported for the
http_requests
dataset. Available values: "edge". - logpull
Options String - This field is deprecated. Use
output_options
instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately. - max
Upload Bytes Number - The maximum uncompressed file size of a batch of logs. This setting value must be between 5 MB and 1 GB, or 0 to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. This parameter is not available for jobs with edge as its kind. - max
Upload Interval Seconds Number - The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or 0 to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. This parameter is only used for jobs with edge as its kind. - max
Upload Records Number - The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or 0 to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. This parameter is not available for jobs with edge as its kind. - name String
- Optional human readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
- output
Options Property Map - The structured replacement for
logpull_options
. When including this field, the logpull_options
field will be ignored. - ownership
Challenge String - Ownership challenge token to prove destination ownership.
- zone
Id String - The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
Outputs
All input properties are implicitly available as output properties. Additionally, the LogpushJob resource produces the following output properties:
- Error
Message string - If not null, the job is currently failing. Failures are usually repetitive (example: no permissions to write to destination bucket). Only the last failure is recorded. On successful execution of a job the errormessage and lasterror are set to null.
- Id string
- The provider-assigned unique ID for this managed resource.
- Last
Complete string - Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. If the job has never run or has just been enabled and hasn't run yet then the field will be empty.
- Last
Error string - Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field.
- Error
Message string - If not null, the job is currently failing. Failures are usually repetitive (example: no permissions to write to destination bucket). Only the last failure is recorded. On successful execution of a job the errormessage and lasterror are set to null.
- Id string
- The provider-assigned unique ID for this managed resource.
- Last
Complete string - Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. If the job has never run or has just been enabled and hasn't run yet then the field will be empty.
- Last
Error string - Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field.
- error
Message String - If not null, the job is currently failing. Failures are usually repetitive (example: no permissions to write to destination bucket). Only the last failure is recorded. On successful execution of a job the errormessage and lasterror are set to null.
- id String
- The provider-assigned unique ID for this managed resource.
- last
Complete String - Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. If the job has never run or has just been enabled and hasn't run yet then the field will be empty.
- last
Error String - Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field.
- error
Message string - If not null, the job is currently failing. Failures are usually repetitive (example: no permissions to write to destination bucket). Only the last failure is recorded. On successful execution of a job the errormessage and lasterror are set to null.
- id string
- The provider-assigned unique ID for this managed resource.
- last
Complete string - Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. If the job has never run or has just been enabled and hasn't run yet then the field will be empty.
- last
Error string - Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field.
- error_
message str - If not null, the job is currently failing. Failures are usually repetitive (example: no permissions to write to destination bucket). Only the last failure is recorded. On successful execution of a job the errormessage and lasterror are set to null.
- id str
- The provider-assigned unique ID for this managed resource.
- last_
complete str - Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. If the job has never run or has just been enabled and hasn't run yet then the field will be empty.
- last_
error str - Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field.
- error
Message String - If not null, the job is currently failing. Failures are usually repetitive (example: no permissions to write to destination bucket). Only the last failure is recorded. On successful execution of a job the errormessage and lasterror are set to null.
- id String
- The provider-assigned unique ID for this managed resource.
- last
Complete String - Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. If the job has never run or has just been enabled and hasn't run yet then the field will be empty.
- last
Error String - Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field.
Look up Existing LogpushJob Resource
Get an existing LogpushJob resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: LogpushJobState, opts?: CustomResourceOptions): LogpushJob
@staticmethod
def get(resource_name: str,
id: str,
opts: Optional[ResourceOptions] = None,
account_id: Optional[str] = None,
dataset: Optional[str] = None,
destination_conf: Optional[str] = None,
enabled: Optional[bool] = None,
error_message: Optional[str] = None,
frequency: Optional[str] = None,
kind: Optional[str] = None,
last_complete: Optional[str] = None,
last_error: Optional[str] = None,
logpull_options: Optional[str] = None,
max_upload_bytes: Optional[int] = None,
max_upload_interval_seconds: Optional[int] = None,
max_upload_records: Optional[int] = None,
name: Optional[str] = None,
output_options: Optional[LogpushJobOutputOptionsArgs] = None,
ownership_challenge: Optional[str] = None,
zone_id: Optional[str] = None) -> LogpushJob
func GetLogpushJob(ctx *Context, name string, id IDInput, state *LogpushJobState, opts ...ResourceOption) (*LogpushJob, error)
public static LogpushJob Get(string name, Input<string> id, LogpushJobState? state, CustomResourceOptions? opts = null)
public static LogpushJob get(String name, Output<String> id, LogpushJobState state, CustomResourceOptions options)
resources: _: type: cloudflare:LogpushJob get: id: ${id}
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- Account
Id string - The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- Dataset string
- Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- Destination
Conf string - Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
- Enabled bool
- Flag that indicates if the job is enabled.
- Error
Message string - If not null, the job is currently failing. Failures are usually repetitive (example: no permissions to write to destination bucket). Only the last failure is recorded. On successful execution of a job the error_message and last_error are set to null.
- Frequency string
- This field is deprecated. Please use
max_upload_*
parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low". - Kind string
- The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs. Currently, Edge Log Delivery is only supported for the
http_requests
dataset. Available values: "edge". - Last
Complete string - Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. If the job has never run or has just been enabled and hasn't run yet then the field will be empty.
- Last
Error string - Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field.
- Logpull
Options string - This field is deprecated. Use
output_options
instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately. - Max
Upload intBytes - The maximum uncompressed file size of a batch of logs. This setting value must be between
5 MB
and1 GB
, or0
to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. This parameter is not available for jobs withedge
as its kind. - Max
Upload intInterval Seconds - The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or
0
to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. This parameter is only used for jobs withedge
as its kind. - Max
Upload intRecords - The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or
0
to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. This parameter is not available for jobs withedge
as its kind. - Name string
- Optional human readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
- Output
Options LogpushJob Output Options - The structured replacement for
logpull_options
. When including this field, the logpull_options
field will be ignored. - Ownership
Challenge string - Ownership challenge token to prove destination ownership.
- Zone
Id string - The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
- Account
Id string - The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- Dataset string
- Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- Destination
Conf string - Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
- Enabled bool
- Flag that indicates if the job is enabled.
- Error
Message string - If not null, the job is currently failing. Failures are usually repetitive (example: no permissions to write to destination bucket). Only the last failure is recorded. On successful execution of a job the error_message and last_error are set to null.
- Frequency string
- This field is deprecated. Please use
max_upload_*
parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low". - Kind string
- The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs. Currently, Edge Log Delivery is only supported for the
http_requests
dataset. Available values: "edge". - Last
Complete string - Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. If the job has never run or has just been enabled and hasn't run yet then the field will be empty.
- Last
Error string - Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field.
- Logpull
Options string - This field is deprecated. Use
output_options
instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately. - Max
Upload intBytes - The maximum uncompressed file size of a batch of logs. This setting value must be between
5 MB
and1 GB
, or0
to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. This parameter is not available for jobs withedge
as its kind. - Max
Upload intInterval Seconds - The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or
0
to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. This parameter is only used for jobs withedge
as its kind. - Max
Upload intRecords - The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or
0
to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. This parameter is not available for jobs withedge
as its kind. - Name string
- Optional human readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
- Output
Options LogpushJob Output Options Args - The structured replacement for
logpull_options
. When including this field, the logpull_options
field will be ignored. - Ownership
Challenge string - Ownership challenge token to prove destination ownership.
- Zone
Id string - The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
- account
Id String - The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- dataset String
- Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- destination
Conf String - Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
- enabled Boolean
- Flag that indicates if the job is enabled.
- error
Message String - If not null, the job is currently failing. Failures are usually repetitive (example: no permissions to write to destination bucket). Only the last failure is recorded. On successful execution of a job the error_message and last_error are set to null.
- frequency String
- This field is deprecated. Please use
max_upload_*
parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low". - kind String
- The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs. Currently, Edge Log Delivery is only supported for the
http_requests
dataset. Available values: "edge". - last
Complete String - Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. If the job has never run or has just been enabled and hasn't run yet then the field will be empty.
- last
Error String - Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field.
- logpull
Options String - This field is deprecated. Use
output_options
instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately. - max
Upload IntegerBytes - The maximum uncompressed file size of a batch of logs. This setting value must be between
5 MB
and1 GB
, or0
to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. This parameter is not available for jobs withedge
as its kind. - max
Upload IntegerInterval Seconds - The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or
0
to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. This parameter is only used for jobs withedge
as its kind. - max
Upload IntegerRecords - The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or
0
to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. This parameter is not available for jobs withedge
as its kind. - name String
- Optional human readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
- output
Options LogpushJob Output Options - The structured replacement for
logpull_options
. When including this field, the logpull_options
field will be ignored. - ownership
Challenge String - Ownership challenge token to prove destination ownership.
- zone
Id String - The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
- account
Id string - The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- dataset string
- Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- destination
Conf string - Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
- enabled boolean
- Flag that indicates if the job is enabled.
- error
Message string - If not null, the job is currently failing. Failures are usually repetitive (example: no permissions to write to destination bucket). Only the last failure is recorded. On successful execution of a job the error_message and last_error are set to null.
- frequency string
- This field is deprecated. Please use
max_upload_*
parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low". - kind string
- The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs. Currently, Edge Log Delivery is only supported for the
http_requests
dataset. Available values: "edge". - last
Complete string - Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. If the job has never run or has just been enabled and hasn't run yet then the field will be empty.
- last
Error string - Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field.
- logpull
Options string - This field is deprecated. Use
output_options
instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately. - max
Upload numberBytes - The maximum uncompressed file size of a batch of logs. This setting value must be between
5 MB
and1 GB
, or0
to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. This parameter is not available for jobs withedge
as its kind. - max
Upload numberInterval Seconds - The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or
0
to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. This parameter is only used for jobs withedge
as its kind. - max
Upload numberRecords - The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or
0
to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. This parameter is not available for jobs withedge
as its kind. - name string
- Optional human readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
- output
Options LogpushJob Output Options - The structured replacement for
logpull_options
. When including this field, the logpull_options
field will be ignored. - ownership
Challenge string - Ownership challenge token to prove destination ownership.
- zone
Id string - The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
- account_
id str - The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- dataset str
- Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- destination_
conf str - Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
- enabled bool
- Flag that indicates if the job is enabled.
- error_
message str - If not null, the job is currently failing. Failures are usually repetitive (example: no permissions to write to destination bucket). Only the last failure is recorded. On successful execution of a job the error_message and last_error are set to null.
- frequency str
- This field is deprecated. Please use
max_upload_*
parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low". - kind str
- The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs. Currently, Edge Log Delivery is only supported for the
http_requests
dataset. Available values: "edge". - last_
complete str - Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. If the job has never run or has just been enabled and hasn't run yet then the field will be empty.
- last_
error str - Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field.
- logpull_
options str - This field is deprecated. Use
output_options
instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately. - max_
upload_ intbytes - The maximum uncompressed file size of a batch of logs. This setting value must be between
5 MB
and1 GB
, or0
to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. This parameter is not available for jobs withedge
as its kind. - max_
upload_ intinterval_ seconds - The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or
0
to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. This parameter is only used for jobs withedge
as its kind. - max_
upload_ intrecords - The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or
0
to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. This parameter is not available for jobs withedge
as its kind. - name str
- Optional human readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
- output_
options LogpushJob Output Options Args - The structured replacement for
logpull_options
. When including this field, the logpull_options
field will be ignored. - ownership_
challenge str - Ownership challenge token to prove destination ownership.
- zone_
id str - The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
- account
Id String - The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
- dataset String
- Name of the dataset. A list of supported datasets can be found on the Developer Docs.
- destination
Conf String - Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
- enabled Boolean
- Flag that indicates if the job is enabled.
- error
Message String - If not null, the job is currently failing. Failures are usually repetitive (example: no permissions to write to destination bucket). Only the last failure is recorded. On successful execution of a job the error_message and last_error are set to null.
- frequency String
- This field is deprecated. Please use
max_upload_*
parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. Available values: "high", "low". - kind String
- The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs. Currently, Edge Log Delivery is only supported for the
http_requests
dataset. Available values: "edge". - last
Complete String - Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. If the job has never run or has just been enabled and hasn't run yet then the field will be empty.
- last
Error String - Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field.
- logpull
Options String - This field is deprecated. Use
output_options
instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately. - max
Upload NumberBytes - The maximum uncompressed file size of a batch of logs. This setting value must be between
5 MB
and1 GB
, or0
to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. This parameter is not available for jobs withedge
as its kind. - max
Upload NumberInterval Seconds - The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or
0
to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. This parameter is only used for jobs withedge
as its kind. - max
Upload NumberRecords - The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or
0
to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. This parameter is not available for jobs withedge
as its kind. - name String
- Optional human readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job.
- output
Options Property Map - The structured replacement for
logpull_options
. When including this field, the logpull_options
field will be ignored. - ownership
Challenge String - Ownership challenge token to prove destination ownership.
- zone
Id String - The Zone ID to use for this endpoint. Mutually exclusive with the Account ID.
Supporting Types
LogpushJobOutputOptions, LogpushJobOutputOptionsArgs
- Batch
Prefix string - String to be prepended before each batch.
- Batch
Suffix string - String to be appended after each batch.
- Cve202144228 bool
- If set to true, will cause all occurrences of
${
in the generated files to be replaced withx{
. - Field
Delimiter string - String to join fields. This field will be ignored when
record_template
is set. - Field
Names List<string> - List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the field names you are interested in.
- Output
Type string - Specifies the output type, such as
ndjson
orcsv
. This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, are different between output types. Available values: "ndjson", "csv". - Record
Delimiter string - String to be inserted in-between the records as separator.
- Record
Prefix string - String to be prepended before each record.
- Record
Suffix string - String to be appended after each record.
- Record
Template string - String to use as template for each record instead of the default json key value mapping. All fields used in the template must be present in
field_names
as well, otherwise they will end up as null. Format as a Go text/template
without any standard functions, like conditionals, loops, sub-templates, etc. - Sample
Rate double - Floating number to specify sampling rate. Sampling is applied on top of filtering, and regardless of the current
sample_interval
of the data. - Timestamp
Format string - String to specify the format for timestamps, such as
unixnano
,unix
, orrfc3339
. Available values: "unixnano", "unix", "rfc3339".
- Batch
Prefix string - String to be prepended before each batch.
- Batch
Suffix string - String to be appended after each batch.
- Cve202144228 bool
- If set to true, will cause all occurrences of
${
in the generated files to be replaced withx{
. - Field
Delimiter string - String to join fields. This field will be ignored when
record_template
is set. - Field
Names []string - List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the field names you are interested in.
- Output
Type string - Specifies the output type, such as
ndjson
orcsv
. This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, are different between output types. Available values: "ndjson", "csv". - Record
Delimiter string - String to be inserted in-between the records as separator.
- Record
Prefix string - String to be prepended before each record.
- Record
Suffix string - String to be appended after each record.
- Record
Template string - String to use as template for each record instead of the default json key value mapping. All fields used in the template must be present in
field_names
as well, otherwise they will end up as null. Format as a Go text/template
without any standard functions, like conditionals, loops, sub-templates, etc. - Sample
Rate float64 - Floating number to specify sampling rate. Sampling is applied on top of filtering, and regardless of the current
sample_interval
of the data. - Timestamp
Format string - String to specify the format for timestamps, such as
unixnano
,unix
, orrfc3339
. Available values: "unixnano", "unix", "rfc3339".
- batch
Prefix String - String to be prepended before each batch.
- batch
Suffix String - String to be appended after each batch.
- cve202144228 Boolean
- If set to true, will cause all occurrences of
${
in the generated files to be replaced withx{
. - field
Delimiter String - String to join fields. This field will be ignored when
record_template
is set. - field
Names List<String> - List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the field names you are interested in.
- output
Type String - Specifies the output type, such as
ndjson
orcsv
. This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, are different between output types. Available values: "ndjson", "csv". - record
Delimiter String - String to be inserted in-between the records as separator.
- record
Prefix String - String to be prepended before each record.
- record
Suffix String - String to be appended after each record.
- record
Template String - String to use as template for each record instead of the default json key value mapping. All fields used in the template must be present in
field_names
as well, otherwise they will end up as null. Format as a Go text/template
without any standard functions, like conditionals, loops, sub-templates, etc. - sample
Rate Double - Floating number to specify sampling rate. Sampling is applied on top of filtering, and regardless of the current
sample_interval
of the data. - timestamp
Format String - String to specify the format for timestamps, such as
unixnano
,unix
, orrfc3339
. Available values: "unixnano", "unix", "rfc3339".
- batch
Prefix string - String to be prepended before each batch.
- batch
Suffix string - String to be appended after each batch.
- cve202144228 boolean
- If set to true, will cause all occurrences of
${
in the generated files to be replaced withx{
. - field
Delimiter string - String to join fields. This field will be ignored when
record_template
is set. - field
Names string[] - List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the field names you are interested in.
- output
Type string - Specifies the output type, such as
ndjson
orcsv
. This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, are different between output types. Available values: "ndjson", "csv". - record
Delimiter string - String to be inserted in-between the records as separator.
- record
Prefix string - String to be prepended before each record.
- record
Suffix string - String to be appended after each record.
- record
Template string - String to use as template for each record instead of the default json key value mapping. All fields used in the template must be present in
field_names
as well, otherwise they will end up as null. Format as a Go text/template
without any standard functions, like conditionals, loops, sub-templates, etc. - sample
Rate number - Floating number to specify sampling rate. Sampling is applied on top of filtering, and regardless of the current
sample_interval
of the data. - timestamp
Format string - String to specify the format for timestamps, such as
unixnano
,unix
, orrfc3339
. Available values: "unixnano", "unix", "rfc3339".
- batch_
prefix str - String to be prepended before each batch.
- batch_
suffix str - String to be appended after each batch.
- cve202144228 bool
- If set to true, will cause all occurrences of
${
in the generated files to be replaced withx{
. - field_
delimiter str - String to join fields. This field will be ignored when
record_template
is set. - field_
names Sequence[str] - List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the field names you are interested in.
- output_
type str - Specifies the output type, such as
ndjson
or csv
. This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, are different between output types. Available values: "ndjson", "csv". - record_
delimiter str - String to be inserted in-between the records as separator.
- record_
prefix str - String to be prepended before each record.
- record_
suffix str - String to be appended after each record.
- record_
template str - String to use as template for each record instead of the default json key value mapping. All fields used in the template must be present in
field_names
as well, otherwise they will end up as null. Format as a Go text/template
without any standard functions, like conditionals, loops, sub-templates, etc. - sample_
rate float - Floating number to specify sampling rate. Sampling is applied on top of filtering, and regardless of the current
sample_interval
of the data. - timestamp_
format str - String to specify the format for timestamps, such as
unixnano
, unix
, or rfc3339
. Available values: "unixnano", "unix", "rfc3339".
- batch
Prefix String - String to be prepended before each batch.
- batch
Suffix String - String to be appended after each batch.
- cve202144228 Boolean
- If set to true, will cause all occurrences of
${
in the generated files to be replaced with x{
. - field
Delimiter String - String to join fields. This field will be ignored when
record_template
is set. - field
Names List<String> - List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the field names you are interested in.
- output
Type String - Specifies the output type, such as
ndjson
or csv
. This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, are different between output types. Available values: "ndjson", "csv". - record
Delimiter String - String to be inserted in-between the records as separator.
- record
Prefix String - String to be prepended before each record.
- record
Suffix String - String to be appended after each record.
- record
Template String - String to use as template for each record instead of the default json key value mapping. All fields used in the template must be present in
field_names
as well, otherwise they will end up as null. Format as a Go text/template
without any standard functions, like conditionals, loops, sub-templates, etc. - sample
Rate Number - Floating number to specify sampling rate. Sampling is applied on top of filtering, and regardless of the current
sample_interval
of the data. - timestamp
Format String - String to specify the format for timestamps, such as
unixnano
, unix
, or rfc3339
. Available values: "unixnano", "unix", "rfc3339".
Import
$ pulumi import cloudflare:index/logpushJob:LogpushJob example '<{accounts|zones}/{account_id|zone_id}>/<job_id>'
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- Cloudflare pulumi/pulumi-cloudflare
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the
cloudflare
Terraform Provider.