cloudflare.LogpushJob
Explore with Pulumi AI
Example Usage
Create LogpushJob Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new LogpushJob(name: string, args: LogpushJobArgs, opts?: CustomResourceOptions);
@overload
def LogpushJob(resource_name: str,
               args: LogpushJobArgs,
               opts: Optional[ResourceOptions] = None)
@overload
def LogpushJob(resource_name: str,
               opts: Optional[ResourceOptions] = None,
               destination_conf: Optional[str] = None,
               dataset: Optional[str] = None,
               kind: Optional[str] = None,
               enabled: Optional[bool] = None,
               filter: Optional[str] = None,
               frequency: Optional[str] = None,
               account_id: Optional[str] = None,
               logpull_options: Optional[str] = None,
               max_upload_bytes: Optional[int] = None,
               max_upload_interval_seconds: Optional[int] = None,
               max_upload_records: Optional[int] = None,
               name: Optional[str] = None,
               output_options: Optional[LogpushJobOutputOptionsArgs] = None,
               ownership_challenge: Optional[str] = None,
               zone_id: Optional[str] = None)
func NewLogpushJob(ctx *Context, name string, args LogpushJobArgs, opts ...ResourceOption) (*LogpushJob, error)
public LogpushJob(string name, LogpushJobArgs args, CustomResourceOptions? opts = null)
public LogpushJob(String name, LogpushJobArgs args)
public LogpushJob(String name, LogpushJobArgs args, CustomResourceOptions options)
type: cloudflare:LogpushJob
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args LogpushJobArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- resource_name str
- The unique name of the resource.
- args LogpushJobArgs
- The arguments to resource properties.
- opts ResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment.
- name string
- The unique name of the resource.
- args LogpushJobArgs
- The arguments to resource properties.
- opts ResourceOption
- Bag of options to control resource's behavior.
- name string
- The unique name of the resource.
- args LogpushJobArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- name String
- The unique name of the resource.
- args LogpushJobArgs
- The arguments to resource properties.
- options CustomResourceOptions
- Bag of options to control resource's behavior.
Constructor example
The following reference example uses placeholder values for all input properties.
var logpushJobResource = new Cloudflare.LogpushJob("logpushJobResource", new()
{
    DestinationConf = "string",
    Dataset = "string",
    Kind = "string",
    Enabled = false,
    Filter = "string",
    AccountId = "string",
    LogpullOptions = "string",
    MaxUploadBytes = 0,
    MaxUploadIntervalSeconds = 0,
    MaxUploadRecords = 0,
    Name = "string",
    OutputOptions = new Cloudflare.Inputs.LogpushJobOutputOptionsArgs
    {
        BatchPrefix = "string",
        BatchSuffix = "string",
        Cve20214428 = false,
        FieldDelimiter = "string",
        FieldNames = new[]
        {
            "string",
        },
        OutputType = "string",
        RecordDelimiter = "string",
        RecordPrefix = "string",
        RecordSuffix = "string",
        RecordTemplate = "string",
        SampleRate = 0,
        TimestampFormat = "string",
    },
    OwnershipChallenge = "string",
    ZoneId = "string",
});
example, err := cloudflare.NewLogpushJob(ctx, "logpushJobResource", &cloudflare.LogpushJobArgs{
	DestinationConf:          pulumi.String("string"),
	Dataset:                  pulumi.String("string"),
	Kind:                     pulumi.String("string"),
	Enabled:                  pulumi.Bool(false),
	Filter:                   pulumi.String("string"),
	AccountId:                pulumi.String("string"),
	LogpullOptions:           pulumi.String("string"),
	MaxUploadBytes:           pulumi.Int(0),
	MaxUploadIntervalSeconds: pulumi.Int(0),
	MaxUploadRecords:         pulumi.Int(0),
	Name:                     pulumi.String("string"),
	OutputOptions: &cloudflare.LogpushJobOutputOptionsArgs{
		BatchPrefix:    pulumi.String("string"),
		BatchSuffix:    pulumi.String("string"),
		Cve20214428:    pulumi.Bool(false),
		FieldDelimiter: pulumi.String("string"),
		FieldNames: pulumi.StringArray{
			pulumi.String("string"),
		},
		OutputType:      pulumi.String("string"),
		RecordDelimiter: pulumi.String("string"),
		RecordPrefix:    pulumi.String("string"),
		RecordSuffix:    pulumi.String("string"),
		RecordTemplate:  pulumi.String("string"),
		SampleRate:      pulumi.Float64(0),
		TimestampFormat: pulumi.String("string"),
	},
	OwnershipChallenge: pulumi.String("string"),
	ZoneId:             pulumi.String("string"),
})
var logpushJobResource = new LogpushJob("logpushJobResource", LogpushJobArgs.builder()
    .destinationConf("string")
    .dataset("string")
    .kind("string")
    .enabled(false)
    .filter("string")
    .accountId("string")
    .logpullOptions("string")
    .maxUploadBytes(0)
    .maxUploadIntervalSeconds(0)
    .maxUploadRecords(0)
    .name("string")
    .outputOptions(LogpushJobOutputOptionsArgs.builder()
        .batchPrefix("string")
        .batchSuffix("string")
        .cve20214428(false)
        .fieldDelimiter("string")
        .fieldNames("string")
        .outputType("string")
        .recordDelimiter("string")
        .recordPrefix("string")
        .recordSuffix("string")
        .recordTemplate("string")
        .sampleRate(0)
        .timestampFormat("string")
        .build())
    .ownershipChallenge("string")
    .zoneId("string")
    .build());
logpush_job_resource = cloudflare.LogpushJob("logpushJobResource",
    destination_conf="string",
    dataset="string",
    kind="string",
    enabled=False,
    filter="string",
    account_id="string",
    logpull_options="string",
    max_upload_bytes=0,
    max_upload_interval_seconds=0,
    max_upload_records=0,
    name="string",
    output_options={
        "batch_prefix": "string",
        "batch_suffix": "string",
        "cve20214428": False,
        "field_delimiter": "string",
        "field_names": ["string"],
        "output_type": "string",
        "record_delimiter": "string",
        "record_prefix": "string",
        "record_suffix": "string",
        "record_template": "string",
        "sample_rate": 0,
        "timestamp_format": "string",
    },
    ownership_challenge="string",
    zone_id="string")
const logpushJobResource = new cloudflare.LogpushJob("logpushJobResource", {
    destinationConf: "string",
    dataset: "string",
    kind: "string",
    enabled: false,
    filter: "string",
    accountId: "string",
    logpullOptions: "string",
    maxUploadBytes: 0,
    maxUploadIntervalSeconds: 0,
    maxUploadRecords: 0,
    name: "string",
    outputOptions: {
        batchPrefix: "string",
        batchSuffix: "string",
        cve20214428: false,
        fieldDelimiter: "string",
        fieldNames: ["string"],
        outputType: "string",
        recordDelimiter: "string",
        recordPrefix: "string",
        recordSuffix: "string",
        recordTemplate: "string",
        sampleRate: 0,
        timestampFormat: "string",
    },
    ownershipChallenge: "string",
    zoneId: "string",
});
type: cloudflare:LogpushJob
properties:
    accountId: string
    dataset: string
    destinationConf: string
    enabled: false
    filter: string
    kind: string
    logpullOptions: string
    maxUploadBytes: 0
    maxUploadIntervalSeconds: 0
    maxUploadRecords: 0
    name: string
    outputOptions:
        batchPrefix: string
        batchSuffix: string
        cve20214428: false
        fieldDelimiter: string
        fieldNames:
            - string
        outputType: string
        recordDelimiter: string
        recordPrefix: string
        recordSuffix: string
        recordTemplate: string
        sampleRate: 0
        timestampFormat: string
    ownershipChallenge: string
    zoneId: string
LogpushJob Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
The LogpushJob resource accepts the following input properties:
- Dataset string
- The kind of the dataset to use with the logpush job. Available values: access_requests,casb_findings,firewall_events,http_requests,spectrum_events,nel_reports,audit_logs,gateway_dns,gateway_http,gateway_network,dns_logs,network_analytics_logs,workers_trace_events,device_posture_results,zero_trust_network_sessions,magic_ids_detections,page_shield_events,dlp_forensic_copies.
- DestinationConf string
- Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included. See Logpush destination documentation.
- AccountId string
- The account identifier to target for the resource. Must provide only one of account_id,zone_id.
- Enabled bool
- Whether to enable the job.
- Filter string
- Use filters to select the events to include and/or remove from your logs. For more information, refer to Filters.
- Frequency string
- A higher frequency will result in logs being pushed more often with smaller files. A low frequency will push logs less often with larger files. Available values: high, low. Defaults to high.
- Kind string
- The kind of logpush job to create. Available values: edge, instant-logs, "".
- LogpullOptions string
- Configuration string for the Logshare API. It specifies things like requested fields and timestamp formats. See Logpush options documentation.
- MaxUploadBytes int
- The maximum uncompressed file size of a batch of logs. Value must be between 5MB and 1GB.
- MaxUploadIntervalSeconds int
- The maximum interval in seconds for log batches. Value must be between 30 and 300.
- MaxUploadRecords int
- The maximum number of log lines per batch. Value must be between 1000 and 1,000,000.
- Name string
- The name of the logpush job to create.
- OutputOptions LogpushJobOutputOptions
- Structured replacement for logpull_options. When including this field, the logpull_options field will be ignored.
- OwnershipChallenge string
- Ownership challenge token to prove destination ownership, required when destination is Amazon S3, Google Cloud Storage, Microsoft Azure or Sumo Logic. See Developer documentation.
- ZoneId string
- The zone identifier to target for the resource. Must provide only one of account_id,zone_id.
- Dataset string
- The kind of the dataset to use with the logpush job. Available values: access_requests,casb_findings,firewall_events,http_requests,spectrum_events,nel_reports,audit_logs,gateway_dns,gateway_http,gateway_network,dns_logs,network_analytics_logs,workers_trace_events,device_posture_results,zero_trust_network_sessions,magic_ids_detections,page_shield_events,dlp_forensic_copies.
- DestinationConf string
- Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included. See Logpush destination documentation.
- AccountId string
- The account identifier to target for the resource. Must provide only one of account_id,zone_id.
- Enabled bool
- Whether to enable the job.
- Filter string
- Use filters to select the events to include and/or remove from your logs. For more information, refer to Filters.
- Frequency string
- A higher frequency will result in logs being pushed more often with smaller files. A low frequency will push logs less often with larger files. Available values: high, low. Defaults to high.
- Kind string
- The kind of logpush job to create. Available values: edge, instant-logs, "".
- LogpullOptions string
- Configuration string for the Logshare API. It specifies things like requested fields and timestamp formats. See Logpush options documentation.
- MaxUploadBytes int
- The maximum uncompressed file size of a batch of logs. Value must be between 5MB and 1GB.
- MaxUploadIntervalSeconds int
- The maximum interval in seconds for log batches. Value must be between 30 and 300.
- MaxUploadRecords int
- The maximum number of log lines per batch. Value must be between 1000 and 1,000,000.
- Name string
- The name of the logpush job to create.
- OutputOptions LogpushJobOutputOptionsArgs
- Structured replacement for logpull_options. When including this field, the logpull_options field will be ignored.
- OwnershipChallenge string
- Ownership challenge token to prove destination ownership, required when destination is Amazon S3, Google Cloud Storage, Microsoft Azure or Sumo Logic. See Developer documentation.
- ZoneId string
- The zone identifier to target for the resource. Must provide only one of account_id,zone_id.
- dataset String
- The kind of the dataset to use with the logpush job. Available values: access_requests,casb_findings,firewall_events,http_requests,spectrum_events,nel_reports,audit_logs,gateway_dns,gateway_http,gateway_network,dns_logs,network_analytics_logs,workers_trace_events,device_posture_results,zero_trust_network_sessions,magic_ids_detections,page_shield_events,dlp_forensic_copies.
- destinationConf String
- Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included. See Logpush destination documentation.
- accountId String
- The account identifier to target for the resource. Must provide only one of account_id,zone_id.
- enabled Boolean
- Whether to enable the job.
- filter String
- Use filters to select the events to include and/or remove from your logs. For more information, refer to Filters.
- frequency String
- A higher frequency will result in logs being pushed more often with smaller files. A low frequency will push logs less often with larger files. Available values: high, low. Defaults to high.
- kind String
- The kind of logpush job to create. Available values: edge, instant-logs, "".
- logpullOptions String
- Configuration string for the Logshare API. It specifies things like requested fields and timestamp formats. See Logpush options documentation.
- maxUploadBytes Integer
- The maximum uncompressed file size of a batch of logs. Value must be between 5MB and 1GB.
- maxUploadIntervalSeconds Integer
- The maximum interval in seconds for log batches. Value must be between 30 and 300.
- maxUploadRecords Integer
- The maximum number of log lines per batch. Value must be between 1000 and 1,000,000.
- name String
- The name of the logpush job to create.
- outputOptions LogpushJobOutputOptions
- Structured replacement for logpull_options. When including this field, the logpull_options field will be ignored.
- ownershipChallenge String
- Ownership challenge token to prove destination ownership, required when destination is Amazon S3, Google Cloud Storage, Microsoft Azure or Sumo Logic. See Developer documentation.
- zoneId String
- The zone identifier to target for the resource. Must provide only one of account_id,zone_id.
- dataset string
- The kind of the dataset to use with the logpush job. Available values: access_requests,casb_findings,firewall_events,http_requests,spectrum_events,nel_reports,audit_logs,gateway_dns,gateway_http,gateway_network,dns_logs,network_analytics_logs,workers_trace_events,device_posture_results,zero_trust_network_sessions,magic_ids_detections,page_shield_events,dlp_forensic_copies.
- destinationConf string
- Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included. See Logpush destination documentation.
- accountId string
- The account identifier to target for the resource. Must provide only one of account_id,zone_id.
- enabled boolean
- Whether to enable the job.
- filter string
- Use filters to select the events to include and/or remove from your logs. For more information, refer to Filters.
- frequency string
- A higher frequency will result in logs being pushed more often with smaller files. A low frequency will push logs less often with larger files. Available values: high, low. Defaults to high.
- kind string
- The kind of logpush job to create. Available values: edge, instant-logs, "".
- logpullOptions string
- Configuration string for the Logshare API. It specifies things like requested fields and timestamp formats. See Logpush options documentation.
- maxUploadBytes number
- The maximum uncompressed file size of a batch of logs. Value must be between 5MB and 1GB.
- maxUploadIntervalSeconds number
- The maximum interval in seconds for log batches. Value must be between 30 and 300.
- maxUploadRecords number
- The maximum number of log lines per batch. Value must be between 1000 and 1,000,000.
- name string
- The name of the logpush job to create.
- outputOptions LogpushJobOutputOptions
- Structured replacement for logpull_options. When including this field, the logpull_options field will be ignored.
- ownershipChallenge string
- Ownership challenge token to prove destination ownership, required when destination is Amazon S3, Google Cloud Storage, Microsoft Azure or Sumo Logic. See Developer documentation.
- zoneId string
- The zone identifier to target for the resource. Must provide only one of account_id,zone_id.
- dataset str
- The kind of the dataset to use with the logpush job. Available values: access_requests,casb_findings,firewall_events,http_requests,spectrum_events,nel_reports,audit_logs,gateway_dns,gateway_http,gateway_network,dns_logs,network_analytics_logs,workers_trace_events,device_posture_results,zero_trust_network_sessions,magic_ids_detections,page_shield_events,dlp_forensic_copies.
- destination_conf str
- Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included. See Logpush destination documentation.
- account_id str
- The account identifier to target for the resource. Must provide only one of account_id,zone_id.
- enabled bool
- Whether to enable the job.
- filter str
- Use filters to select the events to include and/or remove from your logs. For more information, refer to Filters.
- frequency str
- A higher frequency will result in logs being pushed more often with smaller files. A low frequency will push logs less often with larger files. Available values: high, low. Defaults to high.
- kind str
- The kind of logpush job to create. Available values: edge, instant-logs, "".
- logpull_options str
- Configuration string for the Logshare API. It specifies things like requested fields and timestamp formats. See Logpush options documentation.
- max_upload_bytes int
- The maximum uncompressed file size of a batch of logs. Value must be between 5MB and 1GB.
- max_upload_interval_seconds int
- The maximum interval in seconds for log batches. Value must be between 30 and 300.
- max_upload_records int
- The maximum number of log lines per batch. Value must be between 1000 and 1,000,000.
- name str
- The name of the logpush job to create.
- output_options LogpushJobOutputOptionsArgs
- Structured replacement for logpull_options. When including this field, the logpull_options field will be ignored.
- ownership_challenge str
- Ownership challenge token to prove destination ownership, required when destination is Amazon S3, Google Cloud Storage, Microsoft Azure or Sumo Logic. See Developer documentation.
- zone_id str
- The zone identifier to target for the resource. Must provide only one of account_id,zone_id.
- dataset String
- The kind of the dataset to use with the logpush job. Available values: access_requests,casb_findings,firewall_events,http_requests,spectrum_events,nel_reports,audit_logs,gateway_dns,gateway_http,gateway_network,dns_logs,network_analytics_logs,workers_trace_events,device_posture_results,zero_trust_network_sessions,magic_ids_detections,page_shield_events,dlp_forensic_copies.
- destinationConf String
- Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included. See Logpush destination documentation.
- accountId String
- The account identifier to target for the resource. Must provide only one of account_id,zone_id.
- enabled Boolean
- Whether to enable the job.
- filter String
- Use filters to select the events to include and/or remove from your logs. For more information, refer to Filters.
- frequency String
- A higher frequency will result in logs being pushed more often with smaller files. A low frequency will push logs less often with larger files. Available values: high, low. Defaults to high.
- kind String
- The kind of logpush job to create. Available values: edge, instant-logs, "".
- logpullOptions String
- Configuration string for the Logshare API. It specifies things like requested fields and timestamp formats. See Logpush options documentation.
- maxUploadBytes Number
- The maximum uncompressed file size of a batch of logs. Value must be between 5MB and 1GB.
- maxUploadIntervalSeconds Number
- The maximum interval in seconds for log batches. Value must be between 30 and 300.
- maxUploadRecords Number
- The maximum number of log lines per batch. Value must be between 1000 and 1,000,000.
- name String
- The name of the logpush job to create.
- outputOptions Property Map
- Structured replacement for logpull_options. When including this field, the logpull_options field will be ignored.
- ownershipChallenge String
- Ownership challenge token to prove destination ownership, required when destination is Amazon S3, Google Cloud Storage, Microsoft Azure or Sumo Logic. See Developer documentation.
- zoneId String
- The zone identifier to target for the resource. Must provide only one of account_id,zone_id.
Outputs
All input properties are implicitly available as output properties. Additionally, the LogpushJob resource produces the following output properties:
- Id string
- The provider-assigned unique ID for this managed resource.
- Id string
- The provider-assigned unique ID for this managed resource.
- id String
- The provider-assigned unique ID for this managed resource.
- id string
- The provider-assigned unique ID for this managed resource.
- id str
- The provider-assigned unique ID for this managed resource.
- id String
- The provider-assigned unique ID for this managed resource.
Look up Existing LogpushJob Resource
Get an existing LogpushJob resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: LogpushJobState, opts?: CustomResourceOptions): LogpushJob
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        account_id: Optional[str] = None,
        dataset: Optional[str] = None,
        destination_conf: Optional[str] = None,
        enabled: Optional[bool] = None,
        filter: Optional[str] = None,
        frequency: Optional[str] = None,
        kind: Optional[str] = None,
        logpull_options: Optional[str] = None,
        max_upload_bytes: Optional[int] = None,
        max_upload_interval_seconds: Optional[int] = None,
        max_upload_records: Optional[int] = None,
        name: Optional[str] = None,
        output_options: Optional[LogpushJobOutputOptionsArgs] = None,
        ownership_challenge: Optional[str] = None,
        zone_id: Optional[str] = None) -> LogpushJob
func GetLogpushJob(ctx *Context, name string, id IDInput, state *LogpushJobState, opts ...ResourceOption) (*LogpushJob, error)
public static LogpushJob Get(string name, Input<string> id, LogpushJobState? state, CustomResourceOptions? opts = null)
public static LogpushJob get(String name, Output<String> id, LogpushJobState state, CustomResourceOptions options)
resources:
  _:
    type: cloudflare:LogpushJob
    get:
      id: ${id}
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- resource_name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
- AccountId string
- The account identifier to target for the resource. Must provide only one of account_id,zone_id.
- Dataset string
- The kind of the dataset to use with the logpush job. Available values: access_requests,casb_findings,firewall_events,http_requests,spectrum_events,nel_reports,audit_logs,gateway_dns,gateway_http,gateway_network,dns_logs,network_analytics_logs,workers_trace_events,device_posture_results,zero_trust_network_sessions,magic_ids_detections,page_shield_events,dlp_forensic_copies.
- DestinationConf string
- Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included. See Logpush destination documentation.
- Enabled bool
- Whether to enable the job.
- Filter string
- Use filters to select the events to include and/or remove from your logs. For more information, refer to Filters.
- Frequency string
- A higher frequency will result in logs being pushed more often with smaller files. A low frequency will push logs less often with larger files. Available values: high, low. Defaults to high.
- Kind string
- The kind of logpush job to create. Available values: edge, instant-logs, "".
- LogpullOptions string
- Configuration string for the Logshare API. It specifies things like requested fields and timestamp formats. See Logpush options documentation.
- MaxUploadBytes int
- The maximum uncompressed file size of a batch of logs. Value must be between 5MB and 1GB.
- MaxUploadIntervalSeconds int
- The maximum interval in seconds for log batches. Value must be between 30 and 300.
- MaxUploadRecords int
- The maximum number of log lines per batch. Value must be between 1000 and 1,000,000.
- Name string
- The name of the logpush job to create.
- OutputOptions LogpushJobOutputOptions
- Structured replacement for logpull_options. When including this field, the logpull_options field will be ignored.
- OwnershipChallenge string
- Ownership challenge token to prove destination ownership, required when destination is Amazon S3, Google Cloud Storage, Microsoft Azure or Sumo Logic. See Developer documentation.
- ZoneId string
- The zone identifier to target for the resource. Must provide only one of account_id,zone_id.
- AccountId string
- The account identifier to target for the resource. Must provide only one of account_id,zone_id.
- Dataset string
- The kind of the dataset to use with the logpush job. Available values: access_requests,casb_findings,firewall_events,http_requests,spectrum_events,nel_reports,audit_logs,gateway_dns,gateway_http,gateway_network,dns_logs,network_analytics_logs,workers_trace_events,device_posture_results,zero_trust_network_sessions,magic_ids_detections,page_shield_events,dlp_forensic_copies.
- DestinationConf string
- Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included. See Logpush destination documentation.
- Enabled bool
- Whether to enable the job.
- Filter string
- Use filters to select the events to include and/or remove from your logs. For more information, refer to Filters.
- Frequency string
- A higher frequency will result in logs being pushed more often with smaller files. A low frequency will push logs less often with larger files. Available values: high, low. Defaults to high.
- Kind string
- The kind of logpush job to create. Available values: edge, instant-logs, "".
- LogpullOptions string
- Configuration string for the Logshare API. It specifies things like requested fields and timestamp formats. See Logpush options documentation.
- MaxUploadBytes int
- The maximum uncompressed file size of a batch of logs. Value must be between 5MB and 1GB.
- MaxUploadIntervalSeconds int
- The maximum interval in seconds for log batches. Value must be between 30 and 300.
- MaxUploadRecords int
- The maximum number of log lines per batch. Value must be between 1000 and 1,000,000.
- Name string
- The name of the logpush job to create.
- OutputOptions LogpushJobOutputOptionsArgs
- Structured replacement for logpull_options. When including this field, the logpull_options field will be ignored.
- OwnershipChallenge string
- Ownership challenge token to prove destination ownership, required when destination is Amazon S3, Google Cloud Storage, Microsoft Azure or Sumo Logic. See Developer documentation.
- ZoneId string
- The zone identifier to target for the resource. Must provide only one of account_id,zone_id.
- accountId String
- The account identifier to target for the resource. Must provide only one of account_id,zone_id.
- dataset String
- The kind of the dataset to use with the logpush job. Available values: access_requests,casb_findings,firewall_events,http_requests,spectrum_events,nel_reports,audit_logs,gateway_dns,gateway_http,gateway_network,dns_logs,network_analytics_logs,workers_trace_events,device_posture_results,zero_trust_network_sessions,magic_ids_detections,page_shield_events,dlp_forensic_copies.
- destinationConf String
- Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included. See Logpush destination documentation.
- enabled Boolean
- Whether to enable the job.
- filter String
- Use filters to select the events to include and/or remove from your logs. For more information, refer to Filters.
- frequency String
- A higher frequency will result in logs being pushed more often with smaller files. A low frequency will push logs less often with larger files. Available values: high, low. Defaults to high.
- kind String
- The kind of logpush job to create. Available values: edge, instant-logs, "".
- logpullOptions String
- Configuration string for the Logshare API. It specifies things like requested fields and timestamp formats. See Logpush options documentation.
- maxUploadBytes Integer
- The maximum uncompressed file size of a batch of logs. Value must be between 5MB and 1GB.
- maxUploadIntervalSeconds Integer
- The maximum interval in seconds for log batches. Value must be between 30 and 300.
- maxUploadRecords Integer
- The maximum number of log lines per batch. Value must be between 1000 and 1,000,000.
- name String
- The name of the logpush job to create.
- outputOptions LogpushJobOutputOptions
- Structured replacement for logpull_options. When including this field, the logpull_options field will be ignored.
- ownershipChallenge String
- Ownership challenge token to prove destination ownership, required when destination is Amazon S3, Google Cloud Storage, Microsoft Azure or Sumo Logic. See Developer documentation.
- zoneId String
- The zone identifier to target for the resource. Must provide only one of account_id,zone_id.
- accountId string
- The account identifier to target for the resource. Must provide only one of account_id,zone_id.
- dataset string
- The kind of the dataset to use with the logpush job. Available values: access_requests,casb_findings,firewall_events,http_requests,spectrum_events,nel_reports,audit_logs,gateway_dns,gateway_http,gateway_network,dns_logs,network_analytics_logs,workers_trace_events,device_posture_results,zero_trust_network_sessions,magic_ids_detections,page_shield_events,dlp_forensic_copies.
- destinationConf string
- Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included. See Logpush destination documentation.
- enabled boolean
- Whether to enable the job.
- filter string
- Use filters to select the events to include and/or remove from your logs. For more information, refer to Filters.
- frequency string
- A higher frequency will result in logs being pushed more often with smaller files. A low frequency will push logs less often with larger files. Available values: high, low. Defaults to high.
- kind string
- The kind of logpush job to create. Available values: edge,instant-logs,"".
- logpullOptions string
- Configuration string for the Logshare API. It specifies things like requested fields and timestamp formats. See Logpush options documentation.
- maxUploadBytes number
- The maximum uncompressed file size of a batch of logs. Value must be between 5MB and 1GB.
- maxUploadIntervalSeconds number
- The maximum interval in seconds for log batches. Value must be between 30 and 300.
- maxUploadRecords number
- The maximum number of log lines per batch. Value must be between 1000 and 1,000,000.
- name string
- The name of the logpush job to create.
- outputOptions LogpushJobOutputOptions
- Structured replacement for logpull_options. When including this field, the logpull_options field will be ignored.
- ownershipChallenge string
- Ownership challenge token to prove destination ownership, required when destination is Amazon S3, Google Cloud Storage, Microsoft Azure or Sumo Logic. See Developer documentation.
- zoneId string
- The zone identifier to target for the resource. Must provide only one of account_id,zone_id.
- account_id str
- The account identifier to target for the resource. Must provide only one of account_id,zone_id.
- dataset str
- The kind of the dataset to use with the logpush job. Available values: access_requests,casb_findings,firewall_events,http_requests,spectrum_events,nel_reports,audit_logs,gateway_dns,gateway_http,gateway_network,dns_logs,network_analytics_logs,workers_trace_events,device_posture_results,zero_trust_network_sessions,magic_ids_detections,page_shield_events,dlp_forensic_copies.
- destination_conf str
- Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included. See Logpush destination documentation.
- enabled bool
- Whether to enable the job.
- filter str
- Use filters to select the events to include and/or remove from your logs. For more information, refer to Filters.
- frequency str
- A higher frequency will result in logs being pushed more often with smaller files. A low frequency will push logs less often with larger files. Available values: high, low. Defaults to high.
- kind str
- The kind of logpush job to create. Available values: edge,instant-logs,"".
- logpull_options str
- Configuration string for the Logshare API. It specifies things like requested fields and timestamp formats. See Logpush options documentation.
- max_upload_bytes int
- The maximum uncompressed file size of a batch of logs. Value must be between 5MB and 1GB.
- max_upload_interval_seconds int
- The maximum interval in seconds for log batches. Value must be between 30 and 300.
- max_upload_records int
- The maximum number of log lines per batch. Value must be between 1000 and 1,000,000.
- name str
- The name of the logpush job to create.
- output_options LogpushJobOutputOptionsArgs
- Structured replacement for logpull_options. When including this field, the logpull_options field will be ignored.
- ownership_challenge str
- Ownership challenge token to prove destination ownership, required when destination is Amazon S3, Google Cloud Storage, Microsoft Azure or Sumo Logic. See Developer documentation.
- zone_id str
- The zone identifier to target for the resource. Must provide only one of account_id,zone_id.
- accountId String
- The account identifier to target for the resource. Must provide only one of account_id,zone_id.
- dataset String
- The kind of the dataset to use with the logpush job. Available values: access_requests,casb_findings,firewall_events,http_requests,spectrum_events,nel_reports,audit_logs,gateway_dns,gateway_http,gateway_network,dns_logs,network_analytics_logs,workers_trace_events,device_posture_results,zero_trust_network_sessions,magic_ids_detections,page_shield_events,dlp_forensic_copies.
- destinationConf String
- Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included. See Logpush destination documentation.
- enabled Boolean
- Whether to enable the job.
- filter String
- Use filters to select the events to include and/or remove from your logs. For more information, refer to Filters.
- frequency String
- A higher frequency will result in logs being pushed more often with smaller files. A low frequency will push logs less often with larger files. Available values: high, low. Defaults to high.
- kind String
- The kind of logpush job to create. Available values: edge,instant-logs,"".
- logpullOptions String
- Configuration string for the Logshare API. It specifies things like requested fields and timestamp formats. See Logpush options documentation.
- maxUploadBytes Number
- The maximum uncompressed file size of a batch of logs. Value must be between 5MB and 1GB.
- maxUploadIntervalSeconds Number
- The maximum interval in seconds for log batches. Value must be between 30 and 300.
- maxUploadRecords Number
- The maximum number of log lines per batch. Value must be between 1000 and 1,000,000.
- name String
- The name of the logpush job to create.
- outputOptions Property Map
- Structured replacement for logpull_options. When including this field, the logpull_options field will be ignored.
- ownershipChallenge String
- Ownership challenge token to prove destination ownership, required when destination is Amazon S3, Google Cloud Storage, Microsoft Azure or Sumo Logic. See Developer documentation.
- zoneId String
- The zone identifier to target for the resource. Must provide only one of account_id,zone_id.
Supporting Types
LogpushJobOutputOptions, LogpushJobOutputOptionsArgs        
- BatchPrefix string
- String to be prepended before each batch.
- BatchSuffix string
- String to be appended after each batch.
- Cve20214428 bool
- Mitigation for CVE-2021-44228. If set to true, will cause all occurrences of ${ in the generated files to be replaced with x{. Defaults to false.
- FieldDelimiter string
- String to join fields. This field will be ignored when record_template is set. Defaults to ,.
- FieldNames List<string>
- List of field names to be included in the Logpush output.
- OutputType string
- Specifies the output type. Available values: ndjson, csv. Defaults to ndjson.
- RecordDelimiter string
- String to be inserted in-between the records as separator.
- RecordPrefix string
- String to be prepended before each record. Defaults to {.
- RecordSuffix string
- String to be appended after each record. Defaults to }.
- RecordTemplate string
- String to use as template for each record instead of the default comma-separated list.
- SampleRate double
- Specifies the sampling rate. Defaults to 1.
- TimestampFormat string
- Specifies the format for timestamps. Available values: unixnano, unix, rfc3339. Defaults to unixnano.
- BatchPrefix string
- String to be prepended before each batch.
- BatchSuffix string
- String to be appended after each batch.
- Cve20214428 bool
- Mitigation for CVE-2021-44228. If set to true, will cause all occurrences of ${ in the generated files to be replaced with x{. Defaults to false.
- FieldDelimiter string
- String to join fields. This field will be ignored when record_template is set. Defaults to ,.
- FieldNames []string
- List of field names to be included in the Logpush output.
- OutputType string
- Specifies the output type. Available values: ndjson, csv. Defaults to ndjson.
- RecordDelimiter string
- String to be inserted in-between the records as separator.
- RecordPrefix string
- String to be prepended before each record. Defaults to {.
- RecordSuffix string
- String to be appended after each record. Defaults to }.
- RecordTemplate string
- String to use as template for each record instead of the default comma-separated list.
- SampleRate float64
- Specifies the sampling rate. Defaults to 1.
- TimestampFormat string
- Specifies the format for timestamps. Available values: unixnano, unix, rfc3339. Defaults to unixnano.
- batchPrefix String
- String to be prepended before each batch.
- batchSuffix String
- String to be appended after each batch.
- cve20214428 Boolean
- Mitigation for CVE-2021-44228. If set to true, will cause all occurrences of ${ in the generated files to be replaced with x{. Defaults to false.
- fieldDelimiter String
- String to join fields. This field will be ignored when record_template is set. Defaults to ,.
- fieldNames List<String>
- List of field names to be included in the Logpush output.
- outputType String
- Specifies the output type. Available values: ndjson, csv. Defaults to ndjson.
- recordDelimiter String
- String to be inserted in-between the records as separator.
- recordPrefix String
- String to be prepended before each record. Defaults to {.
- recordSuffix String
- String to be appended after each record. Defaults to }.
- recordTemplate String
- String to use as template for each record instead of the default comma-separated list.
- sampleRate Double
- Specifies the sampling rate. Defaults to 1.
- timestampFormat String
- Specifies the format for timestamps. Available values: unixnano, unix, rfc3339. Defaults to unixnano.
- batchPrefix string
- String to be prepended before each batch.
- batchSuffix string
- String to be appended after each batch.
- cve20214428 boolean
- Mitigation for CVE-2021-44228. If set to true, will cause all occurrences of ${ in the generated files to be replaced with x{. Defaults to false.
- fieldDelimiter string
- String to join fields. This field will be ignored when record_template is set. Defaults to ,.
- fieldNames string[]
- List of field names to be included in the Logpush output.
- outputType string
- Specifies the output type. Available values: ndjson, csv. Defaults to ndjson.
- recordDelimiter string
- String to be inserted in-between the records as separator.
- recordPrefix string
- String to be prepended before each record. Defaults to {.
- recordSuffix string
- String to be appended after each record. Defaults to }.
- recordTemplate string
- String to use as template for each record instead of the default comma-separated list.
- sampleRate number
- Specifies the sampling rate. Defaults to 1.
- timestampFormat string
- Specifies the format for timestamps. Available values: unixnano, unix, rfc3339. Defaults to unixnano.
- batch_prefix str
- String to be prepended before each batch.
- batch_suffix str
- String to be appended after each batch.
- cve20214428 bool
- Mitigation for CVE-2021-44228. If set to true, will cause all occurrences of ${ in the generated files to be replaced with x{. Defaults to false.
- field_delimiter str
- String to join fields. This field will be ignored when record_template is set. Defaults to ,.
- field_names Sequence[str]
- List of field names to be included in the Logpush output.
- output_type str
- Specifies the output type. Available values: ndjson, csv. Defaults to ndjson.
- record_delimiter str
- String to be inserted in-between the records as separator.
- record_prefix str
- String to be prepended before each record. Defaults to {.
- record_suffix str
- String to be appended after each record. Defaults to }.
- record_template str
- String to use as template for each record instead of the default comma-separated list.
- sample_rate float
- Specifies the sampling rate. Defaults to 1.
- timestamp_format str
- Specifies the format for timestamps. Available values: unixnano, unix, rfc3339. Defaults to unixnano.
- batchPrefix String
- String to be prepended before each batch.
- batchSuffix String
- String to be appended after each batch.
- cve20214428 Boolean
- Mitigation for CVE-2021-44228. If set to true, will cause all occurrences of ${ in the generated files to be replaced with x{. Defaults to false.
- fieldDelimiter String
- String to join fields. This field will be ignored when record_template is set. Defaults to ,.
- fieldNames List<String>
- List of field names to be included in the Logpush output.
- outputType String
- Specifies the output type. Available values: ndjson, csv. Defaults to ndjson.
- recordDelimiter String
- String to be inserted in-between the records as separator.
- recordPrefix String
- String to be prepended before each record. Defaults to {.
- recordSuffix String
- String to be appended after each record. Defaults to }.
- recordTemplate String
- String to use as template for each record instead of the default comma-separated list.
- sampleRate Number
- Specifies the sampling rate. Defaults to 1.
- timestampFormat String
- Specifies the format for timestamps. Available values: unixnano, unix, rfc3339. Defaults to unixnano.
Import
Import an account-scoped job.
$ pulumi import cloudflare:index/logpushJob:LogpushJob example account/<account_id>/<job_id>
Import a zone-scoped job.
$ pulumi import cloudflare:index/logpushJob:LogpushJob example zone/<zone_id>/<job_id>
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- Cloudflare pulumi/pulumi-cloudflare
- License
- Apache-2.0
- Notes
- This Pulumi package is based on the cloudflare Terraform Provider.