# Logpush # Datasets # Fields ## List fields `client.Logpush.Datasets.Fields.Get(ctx, datasetID, query) (*DatasetFieldGetResponse, error)` **get** `/{accounts_or_zones}/{account_or_zone_id}/logpush/datasets/{dataset_id}/fields` Lists all fields available for a dataset. The response result is. an object with key-value pairs, where keys are field names, and values are descriptions. ### Parameters - `datasetID DatasetFieldGetParamsDatasetID` Name of the dataset. A list of supported datasets can be found on the [Developer Docs](https://developers.cloudflare.com/logs/reference/log-fields/). - `const DatasetFieldGetParamsDatasetIDAccessRequests DatasetFieldGetParamsDatasetID = "access_requests"` - `const DatasetFieldGetParamsDatasetIDAuditLogs DatasetFieldGetParamsDatasetID = "audit_logs"` - `const DatasetFieldGetParamsDatasetIDAuditLogsV2 DatasetFieldGetParamsDatasetID = "audit_logs_v2"` - `const DatasetFieldGetParamsDatasetIDBISOUserActions DatasetFieldGetParamsDatasetID = "biso_user_actions"` - `const DatasetFieldGetParamsDatasetIDCasbFindings DatasetFieldGetParamsDatasetID = "casb_findings"` - `const DatasetFieldGetParamsDatasetIDDevicePostureResults DatasetFieldGetParamsDatasetID = "device_posture_results"` - `const DatasetFieldGetParamsDatasetIDDEXApplicationTests DatasetFieldGetParamsDatasetID = "dex_application_tests"` - `const DatasetFieldGetParamsDatasetIDDEXDeviceStateEvents DatasetFieldGetParamsDatasetID = "dex_device_state_events"` - `const DatasetFieldGetParamsDatasetIDDLPForensicCopies DatasetFieldGetParamsDatasetID = "dlp_forensic_copies"` - `const DatasetFieldGetParamsDatasetIDDNSFirewallLogs DatasetFieldGetParamsDatasetID = "dns_firewall_logs"` - `const DatasetFieldGetParamsDatasetIDDNSLogs DatasetFieldGetParamsDatasetID = "dns_logs"` - `const DatasetFieldGetParamsDatasetIDEmailSecurityAlerts DatasetFieldGetParamsDatasetID = "email_security_alerts"` - `const DatasetFieldGetParamsDatasetIDFirewallEvents DatasetFieldGetParamsDatasetID = "firewall_events"` 
- `const DatasetFieldGetParamsDatasetIDGatewayDNS DatasetFieldGetParamsDatasetID = "gateway_dns"` - `const DatasetFieldGetParamsDatasetIDGatewayHTTP DatasetFieldGetParamsDatasetID = "gateway_http"` - `const DatasetFieldGetParamsDatasetIDGatewayNetwork DatasetFieldGetParamsDatasetID = "gateway_network"` - `const DatasetFieldGetParamsDatasetIDHTTPRequests DatasetFieldGetParamsDatasetID = "http_requests"` - `const DatasetFieldGetParamsDatasetIDIPSECLogs DatasetFieldGetParamsDatasetID = "ipsec_logs"` - `const DatasetFieldGetParamsDatasetIDMagicIDsDetections DatasetFieldGetParamsDatasetID = "magic_ids_detections"` - `const DatasetFieldGetParamsDatasetIDNELReports DatasetFieldGetParamsDatasetID = "nel_reports"` - `const DatasetFieldGetParamsDatasetIDNetworkAnalyticsLogs DatasetFieldGetParamsDatasetID = "network_analytics_logs"` - `const DatasetFieldGetParamsDatasetIDPageShieldEvents DatasetFieldGetParamsDatasetID = "page_shield_events"` - `const DatasetFieldGetParamsDatasetIDSinkholeHTTPLogs DatasetFieldGetParamsDatasetID = "sinkhole_http_logs"` - `const DatasetFieldGetParamsDatasetIDSpectrumEvents DatasetFieldGetParamsDatasetID = "spectrum_events"` - `const DatasetFieldGetParamsDatasetIDSSHLogs DatasetFieldGetParamsDatasetID = "ssh_logs"` - `const DatasetFieldGetParamsDatasetIDWARPConfigChanges DatasetFieldGetParamsDatasetID = "warp_config_changes"` - `const DatasetFieldGetParamsDatasetIDWARPToggleChanges DatasetFieldGetParamsDatasetID = "warp_toggle_changes"` - `const DatasetFieldGetParamsDatasetIDWorkersTraceEvents DatasetFieldGetParamsDatasetID = "workers_trace_events"` - `const DatasetFieldGetParamsDatasetIDZarazEvents DatasetFieldGetParamsDatasetID = "zaraz_events"` - `const DatasetFieldGetParamsDatasetIDZeroTrustNetworkSessions DatasetFieldGetParamsDatasetID = "zero_trust_network_sessions"` - `query DatasetFieldGetParams` - `AccountID param.Field[string]` The Account ID to use for this endpoint. Mutually exclusive with the Zone ID. 
- `ZoneID param.Field[string]` The Zone ID to use for this endpoint. Mutually exclusive with the Account ID. ### Returns - `type DatasetFieldGetResponse interface{…}` ### Example ```go package main import ( "context" "fmt" "github.com/cloudflare/cloudflare-go" "github.com/cloudflare/cloudflare-go/logpush" "github.com/cloudflare/cloudflare-go/option" ) func main() { client := cloudflare.NewClient( option.WithAPIToken("Sn3lZJTBX6kkg7OdcBUAxOO963GEIyGQqnFTOFYY"), ) field, err := client.Logpush.Datasets.Fields.Get( context.TODO(), logpush.DatasetFieldGetParamsDatasetIDGatewayDNS, logpush.DatasetFieldGetParams{ }, ) if err != nil { panic(err.Error()) } fmt.Printf("%+v\n", field) } ``` #### Response ```json { "errors": [ { "code": 1000, "message": "message", "documentation_url": "documentation_url", "source": { "pointer": "pointer" } } ], "messages": [ { "code": 1000, "message": "message", "documentation_url": "documentation_url", "source": { "pointer": "pointer" } } ], "success": true, "result": {} } ``` # Jobs ## List Logpush jobs for a dataset `client.Logpush.Datasets.Jobs.Get(ctx, datasetID, query) (*SinglePage[LogpushJob], error)` **get** `/{accounts_or_zones}/{account_or_zone_id}/logpush/datasets/{dataset_id}/jobs` Lists Logpush jobs for an account or zone for a dataset. ### Parameters - `datasetID DatasetJobGetParamsDatasetID` Name of the dataset. A list of supported datasets can be found on the [Developer Docs](https://developers.cloudflare.com/logs/reference/log-fields/). 
- `const DatasetJobGetParamsDatasetIDAccessRequests DatasetJobGetParamsDatasetID = "access_requests"` - `const DatasetJobGetParamsDatasetIDAuditLogs DatasetJobGetParamsDatasetID = "audit_logs"` - `const DatasetJobGetParamsDatasetIDAuditLogsV2 DatasetJobGetParamsDatasetID = "audit_logs_v2"` - `const DatasetJobGetParamsDatasetIDBISOUserActions DatasetJobGetParamsDatasetID = "biso_user_actions"` - `const DatasetJobGetParamsDatasetIDCasbFindings DatasetJobGetParamsDatasetID = "casb_findings"` - `const DatasetJobGetParamsDatasetIDDevicePostureResults DatasetJobGetParamsDatasetID = "device_posture_results"` - `const DatasetJobGetParamsDatasetIDDEXApplicationTests DatasetJobGetParamsDatasetID = "dex_application_tests"` - `const DatasetJobGetParamsDatasetIDDEXDeviceStateEvents DatasetJobGetParamsDatasetID = "dex_device_state_events"` - `const DatasetJobGetParamsDatasetIDDLPForensicCopies DatasetJobGetParamsDatasetID = "dlp_forensic_copies"` - `const DatasetJobGetParamsDatasetIDDNSFirewallLogs DatasetJobGetParamsDatasetID = "dns_firewall_logs"` - `const DatasetJobGetParamsDatasetIDDNSLogs DatasetJobGetParamsDatasetID = "dns_logs"` - `const DatasetJobGetParamsDatasetIDEmailSecurityAlerts DatasetJobGetParamsDatasetID = "email_security_alerts"` - `const DatasetJobGetParamsDatasetIDFirewallEvents DatasetJobGetParamsDatasetID = "firewall_events"` - `const DatasetJobGetParamsDatasetIDGatewayDNS DatasetJobGetParamsDatasetID = "gateway_dns"` - `const DatasetJobGetParamsDatasetIDGatewayHTTP DatasetJobGetParamsDatasetID = "gateway_http"` - `const DatasetJobGetParamsDatasetIDGatewayNetwork DatasetJobGetParamsDatasetID = "gateway_network"` - `const DatasetJobGetParamsDatasetIDHTTPRequests DatasetJobGetParamsDatasetID = "http_requests"` - `const DatasetJobGetParamsDatasetIDIPSECLogs DatasetJobGetParamsDatasetID = "ipsec_logs"` - `const DatasetJobGetParamsDatasetIDMagicIDsDetections DatasetJobGetParamsDatasetID = "magic_ids_detections"` - `const DatasetJobGetParamsDatasetIDNELReports 
DatasetJobGetParamsDatasetID = "nel_reports"` - `const DatasetJobGetParamsDatasetIDNetworkAnalyticsLogs DatasetJobGetParamsDatasetID = "network_analytics_logs"` - `const DatasetJobGetParamsDatasetIDPageShieldEvents DatasetJobGetParamsDatasetID = "page_shield_events"` - `const DatasetJobGetParamsDatasetIDSinkholeHTTPLogs DatasetJobGetParamsDatasetID = "sinkhole_http_logs"` - `const DatasetJobGetParamsDatasetIDSpectrumEvents DatasetJobGetParamsDatasetID = "spectrum_events"` - `const DatasetJobGetParamsDatasetIDSSHLogs DatasetJobGetParamsDatasetID = "ssh_logs"` - `const DatasetJobGetParamsDatasetIDWARPConfigChanges DatasetJobGetParamsDatasetID = "warp_config_changes"` - `const DatasetJobGetParamsDatasetIDWARPToggleChanges DatasetJobGetParamsDatasetID = "warp_toggle_changes"` - `const DatasetJobGetParamsDatasetIDWorkersTraceEvents DatasetJobGetParamsDatasetID = "workers_trace_events"` - `const DatasetJobGetParamsDatasetIDZarazEvents DatasetJobGetParamsDatasetID = "zaraz_events"` - `const DatasetJobGetParamsDatasetIDZeroTrustNetworkSessions DatasetJobGetParamsDatasetID = "zero_trust_network_sessions"` - `query DatasetJobGetParams` - `AccountID param.Field[string]` The Account ID to use for this endpoint. Mutually exclusive with the Zone ID. - `ZoneID param.Field[string]` The Zone ID to use for this endpoint. Mutually exclusive with the Account ID. ### Returns - `type LogpushJob struct{…}` - `ID int64` Unique id of the job. - `Dataset LogpushJobDataset` Name of the dataset. A list of supported datasets can be found on the [Developer Docs](https://developers.cloudflare.com/logs/reference/log-fields/). 
- `const LogpushJobDatasetAccessRequests LogpushJobDataset = "access_requests"` - `const LogpushJobDatasetAuditLogs LogpushJobDataset = "audit_logs"` - `const LogpushJobDatasetAuditLogsV2 LogpushJobDataset = "audit_logs_v2"` - `const LogpushJobDatasetBISOUserActions LogpushJobDataset = "biso_user_actions"` - `const LogpushJobDatasetCasbFindings LogpushJobDataset = "casb_findings"` - `const LogpushJobDatasetDevicePostureResults LogpushJobDataset = "device_posture_results"` - `const LogpushJobDatasetDEXApplicationTests LogpushJobDataset = "dex_application_tests"` - `const LogpushJobDatasetDEXDeviceStateEvents LogpushJobDataset = "dex_device_state_events"` - `const LogpushJobDatasetDLPForensicCopies LogpushJobDataset = "dlp_forensic_copies"` - `const LogpushJobDatasetDNSFirewallLogs LogpushJobDataset = "dns_firewall_logs"` - `const LogpushJobDatasetDNSLogs LogpushJobDataset = "dns_logs"` - `const LogpushJobDatasetEmailSecurityAlerts LogpushJobDataset = "email_security_alerts"` - `const LogpushJobDatasetFirewallEvents LogpushJobDataset = "firewall_events"` - `const LogpushJobDatasetGatewayDNS LogpushJobDataset = "gateway_dns"` - `const LogpushJobDatasetGatewayHTTP LogpushJobDataset = "gateway_http"` - `const LogpushJobDatasetGatewayNetwork LogpushJobDataset = "gateway_network"` - `const LogpushJobDatasetHTTPRequests LogpushJobDataset = "http_requests"` - `const LogpushJobDatasetIPSECLogs LogpushJobDataset = "ipsec_logs"` - `const LogpushJobDatasetMagicIDsDetections LogpushJobDataset = "magic_ids_detections"` - `const LogpushJobDatasetNELReports LogpushJobDataset = "nel_reports"` - `const LogpushJobDatasetNetworkAnalyticsLogs LogpushJobDataset = "network_analytics_logs"` - `const LogpushJobDatasetPageShieldEvents LogpushJobDataset = "page_shield_events"` - `const LogpushJobDatasetSinkholeHTTPLogs LogpushJobDataset = "sinkhole_http_logs"` - `const LogpushJobDatasetSpectrumEvents LogpushJobDataset = "spectrum_events"` - `const LogpushJobDatasetSSHLogs LogpushJobDataset = 
"ssh_logs"` - `const LogpushJobDatasetWARPConfigChanges LogpushJobDataset = "warp_config_changes"` - `const LogpushJobDatasetWARPToggleChanges LogpushJobDataset = "warp_toggle_changes"` - `const LogpushJobDatasetWorkersTraceEvents LogpushJobDataset = "workers_trace_events"` - `const LogpushJobDatasetZarazEvents LogpushJobDataset = "zaraz_events"` - `const LogpushJobDatasetZeroTrustNetworkSessions LogpushJobDataset = "zero_trust_network_sessions"` - `DestinationConf string` Uniquely identifies a resource (such as an s3 bucket) where data. will be pushed. Additional configuration parameters supported by the destination may be included. - `Enabled bool` Flag that indicates if the job is enabled. - `ErrorMessage string` If not null, the job is currently failing. Failures are usually. repetitive (example: no permissions to write to destination bucket). Only the last failure is recorded. On successful execution of a job the error_message and last_error are set to null. - `Frequency LogpushJobFrequency` This field is deprecated. Please use `max_upload_*` parameters instead. . The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. - `const LogpushJobFrequencyHigh LogpushJobFrequency = "high"` - `const LogpushJobFrequencyLow LogpushJobFrequency = "low"` - `Kind LogpushJobKind` The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs (when supported by the dataset). - `const LogpushJobKindEmpty LogpushJobKind = ""` - `const LogpushJobKindEdge LogpushJobKind = "edge"` - `LastComplete Time` Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. 
If the job has never run or has just been enabled and hasn't run yet then the field will be empty. - `LastError Time` Records the last time the job failed. If not null, the job is currently. failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field. - `LogpullOptions string` This field is deprecated. Use `output_options` instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately. - `MaxUploadBytes LogpushJobMaxUploadBytes` The maximum uncompressed file size of a batch of logs. This setting value must be between `5 MB` and `1 GB`, or `0` to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. - `type LogpushJobMaxUploadBytes int64` The maximum uncompressed file size of a batch of logs. This setting value must be between `5 MB` and `1 GB`, or `0` to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. - `const LogpushJobMaxUploadBytes0 LogpushJobMaxUploadBytes = 0` - `int64` - `MaxUploadIntervalSeconds LogpushJobMaxUploadIntervalSeconds` The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or `0` to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. - `type LogpushJobMaxUploadIntervalSeconds int64` The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or `0` to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. 
- `const LogpushJobMaxUploadIntervalSeconds0 LogpushJobMaxUploadIntervalSeconds = 0` - `int64` - `MaxUploadRecords LogpushJobMaxUploadRecords` The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or `0` to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. - `type LogpushJobMaxUploadRecords int64` The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or `0` to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. - `const LogpushJobMaxUploadRecords0 LogpushJobMaxUploadRecords = 0` - `int64` - `Name string` Optional human readable job name. Not unique. Cloudflare suggests. that you set this to a meaningful string, like the domain name, to make it easier to identify your job. - `OutputOptions OutputOptions` The structured replacement for `logpull_options`. When including this field, the `logpull_option` field will be ignored. - `BatchPrefix string` String to be prepended before each batch. - `BatchSuffix string` String to be appended after each batch. - `Cve2021_44228 bool` If set to true, will cause all occurrences of `${` in the generated files to be replaced with `x{`. - `FieldDelimiter string` String to join fields. This field be ignored when `record_template` is set. - `FieldNames []string` List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the fields names you are interested in. - `MergeSubrequests bool` If set to true, subrequests will be merged into the parent request. Only supported for the `http_requests` dataset. - `OutputType OutputOptionsOutputType` Specifies the output type, such as `ndjson` or `csv`. 
This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, are different between output types. - `const OutputOptionsOutputTypeNdjson OutputOptionsOutputType = "ndjson"` - `const OutputOptionsOutputTypeCsv OutputOptionsOutputType = "csv"` - `RecordDelimiter string` String to be inserted in-between the records as separator. - `RecordPrefix string` String to be prepended before each record. - `RecordSuffix string` String to be appended after each record. - `RecordTemplate string` String to use as template for each record instead of the default json key value mapping. All fields used in the template must be present in `field_names` as well, otherwise they will end up as null. Format as a Go `text/template` without any standard functions, like conditionals, loops, sub-templates, etc. - `SampleRate float64` Floating number to specify sampling rate. Sampling is applied on top of filtering, and regardless of the current `sample_interval` of the data. - `TimestampFormat OutputOptionsTimestampFormat` String to specify the format for timestamps, such as `unixnano`, `unix`, `rfc3339`, `rfc3339ms` or `rfc3339ns`. 
- `const OutputOptionsTimestampFormatUnixnano OutputOptionsTimestampFormat = "unixnano"` - `const OutputOptionsTimestampFormatUnix OutputOptionsTimestampFormat = "unix"` - `const OutputOptionsTimestampFormatRfc3339 OutputOptionsTimestampFormat = "rfc3339"` - `const OutputOptionsTimestampFormatRfc3339ms OutputOptionsTimestampFormat = "rfc3339ms"` - `const OutputOptionsTimestampFormatRfc3339ns OutputOptionsTimestampFormat = "rfc3339ns"` ### Example ```go package main import ( "context" "fmt" "github.com/cloudflare/cloudflare-go" "github.com/cloudflare/cloudflare-go/logpush" "github.com/cloudflare/cloudflare-go/option" ) func main() { client := cloudflare.NewClient( option.WithAPIToken("Sn3lZJTBX6kkg7OdcBUAxOO963GEIyGQqnFTOFYY"), ) page, err := client.Logpush.Datasets.Jobs.Get( context.TODO(), logpush.DatasetJobGetParamsDatasetIDGatewayDNS, logpush.DatasetJobGetParams{ }, ) if err != nil { panic(err.Error()) } fmt.Printf("%+v\n", page) } ``` #### Response ```json { "errors": [], "messages": [], "result": [ { "dataset": "gateway_dns", "destination_conf": "s3://mybucket/logs?region=us-west-2", "enabled": false, "error_message": null, "filter": "{\"where\":{\"and\":[{\"key\":\"ClientRequestPath\",\"operator\":\"contains\",\"value\":\"/static\"},{\"key\":\"ClientRequestHost\",\"operator\":\"eq\",\"value\":\"example.com\"}]}}", "id": 1, "kind": "", "last_complete": null, "last_error": null, "max_upload_bytes": 5000000, "max_upload_interval_seconds": 30, "max_upload_records": 1000, "name": "example.com", "output_options": { "CVE-2021-44228": false, "batch_prefix": "", "batch_suffix": "", "field_delimiter": ",", "field_names": [ "Datetime", "DstIP", "SrcIP" ], "output_type": "ndjson", "record_delimiter": "", "record_prefix": "{", "record_suffix": "}\n", "sample_rate": 1, "timestamp_format": "unixnano" } } ], "success": true } ``` # Edge ## List Instant Logs jobs `client.Logpush.Edge.Get(ctx, query) (*SinglePage[InstantLogpushJob], error)` **get** 
`/zones/{zone_id}/logpush/edge/jobs` Lists Instant Logs jobs for a zone. ### Parameters - `query EdgeGetParams` - `ZoneID param.Field[string]` Identifier. ### Returns - `type InstantLogpushJob struct{…}` - `DestinationConf string` Unique WebSocket address that will receive messages from Cloudflare’s edge. - `Fields string` Comma-separated list of fields. - `Filter string` Filters to drill down into specific events. - `Sample int64` The sample parameter is the sample rate of the records set by the client: "sample": 1 is 100% of records "sample": 10 is 10% and so on. - `SessionID string` Unique session id of the job. ### Example ```go package main import ( "context" "fmt" "github.com/cloudflare/cloudflare-go" "github.com/cloudflare/cloudflare-go/logpush" "github.com/cloudflare/cloudflare-go/option" ) func main() { client := cloudflare.NewClient( option.WithAPIToken("Sn3lZJTBX6kkg7OdcBUAxOO963GEIyGQqnFTOFYY"), ) page, err := client.Logpush.Edge.Get(context.TODO(), logpush.EdgeGetParams{ ZoneID: cloudflare.F("023e105f4ecef8ad9ca31a8372d0c353"), }) if err != nil { panic(err.Error()) } fmt.Printf("%+v\n", page) } ``` #### Response ```json { "errors": [ { "code": 1000, "message": "message", "documentation_url": "documentation_url", "source": { "pointer": "pointer" } } ], "messages": [ { "code": 1000, "message": "message", "documentation_url": "documentation_url", "source": { "pointer": "pointer" } } ], "success": true, "result": [ { "destination_conf": "wss://logs.cloudflare.com/instant-logs/ws/sessions/99d471b1ca3c23cc8e30b6acec5db987", "fields": "ClientIP,ClientRequestHost,ClientRequestMethod,ClientRequestURI,EdgeEndTimestamp,EdgeResponseBytes,EdgeResponseStatus,EdgeStartTimestamp,RayID", "filter": "{\"where\":{\"and\":[{\"key\":\"ClientCountry\",\"operator\":\"neq\",\"value\":\"ca\"}]}}", "sample": 1, "session_id": "99d471b1ca3c23cc8e30b6acec5db987" } ] } ``` ## Create Instant Logs job `client.Logpush.Edge.New(ctx, params) (*InstantLogpushJob, error)` **post** 
`/zones/{zone_id}/logpush/edge/jobs` Creates a new Instant Logs job for a zone. ### Parameters - `params EdgeNewParams` - `ZoneID param.Field[string]` Path param: Identifier. - `Fields param.Field[string]` Body param: Comma-separated list of fields. - `Filter param.Field[string]` Body param: Filters to drill down into specific events. - `Sample param.Field[int64]` Body param: The sample parameter is the sample rate of the records set by the client: "sample": 1 is 100% of records "sample": 10 is 10% and so on. ### Returns - `type InstantLogpushJob struct{…}` - `DestinationConf string` Unique WebSocket address that will receive messages from Cloudflare’s edge. - `Fields string` Comma-separated list of fields. - `Filter string` Filters to drill down into specific events. - `Sample int64` The sample parameter is the sample rate of the records set by the client: "sample": 1 is 100% of records "sample": 10 is 10% and so on. - `SessionID string` Unique session id of the job. ### Example ```go package main import ( "context" "fmt" "github.com/cloudflare/cloudflare-go" "github.com/cloudflare/cloudflare-go/logpush" "github.com/cloudflare/cloudflare-go/option" ) func main() { client := cloudflare.NewClient( option.WithAPIToken("Sn3lZJTBX6kkg7OdcBUAxOO963GEIyGQqnFTOFYY"), ) instantLogpushJob, err := client.Logpush.Edge.New(context.TODO(), logpush.EdgeNewParams{ ZoneID: cloudflare.F("023e105f4ecef8ad9ca31a8372d0c353"), }) if err != nil { panic(err.Error()) } fmt.Printf("%+v\n", instantLogpushJob.SessionID) } ``` #### Response ```json { "errors": [ { "code": 1000, "message": "message", "documentation_url": "documentation_url", "source": { "pointer": "pointer" } } ], "messages": [ { "code": 1000, "message": "message", "documentation_url": "documentation_url", "source": { "pointer": "pointer" } } ], "success": true, "result": { "destination_conf": "wss://logs.cloudflare.com/instant-logs/ws/sessions/99d471b1ca3c23cc8e30b6acec5db987", "fields": 
"ClientIP,ClientRequestHost,ClientRequestMethod,ClientRequestURI,EdgeEndTimestamp,EdgeResponseBytes,EdgeResponseStatus,EdgeStartTimestamp,RayID", "filter": "{\"where\":{\"and\":[{\"key\":\"ClientCountry\",\"operator\":\"neq\",\"value\":\"ca\"}]}}", "sample": 1, "session_id": "99d471b1ca3c23cc8e30b6acec5db987" } } ``` ## Domain Types ### Instant Logpush Job - `type InstantLogpushJob struct{…}` - `DestinationConf string` Unique WebSocket address that will receive messages from Cloudflare’s edge. - `Fields string` Comma-separated list of fields. - `Filter string` Filters to drill down into specific events. - `Sample int64` The sample parameter is the sample rate of the records set by the client: "sample": 1 is 100% of records "sample": 10 is 10% and so on. - `SessionID string` Unique session id of the job. # Jobs ## List Logpush jobs `client.Logpush.Jobs.List(ctx, query) (*SinglePage[LogpushJob], error)` **get** `/{accounts_or_zones}/{account_or_zone_id}/logpush/jobs` Lists Logpush jobs for an account or zone. ### Parameters - `query JobListParams` - `AccountID param.Field[string]` The Account ID to use for this endpoint. Mutually exclusive with the Zone ID. - `ZoneID param.Field[string]` The Zone ID to use for this endpoint. Mutually exclusive with the Account ID. ### Returns - `type LogpushJob struct{…}` - `ID int64` Unique id of the job. - `Dataset LogpushJobDataset` Name of the dataset. A list of supported datasets can be found on the [Developer Docs](https://developers.cloudflare.com/logs/reference/log-fields/). 
- `const LogpushJobDatasetAccessRequests LogpushJobDataset = "access_requests"` - `const LogpushJobDatasetAuditLogs LogpushJobDataset = "audit_logs"` - `const LogpushJobDatasetAuditLogsV2 LogpushJobDataset = "audit_logs_v2"` - `const LogpushJobDatasetBISOUserActions LogpushJobDataset = "biso_user_actions"` - `const LogpushJobDatasetCasbFindings LogpushJobDataset = "casb_findings"` - `const LogpushJobDatasetDevicePostureResults LogpushJobDataset = "device_posture_results"` - `const LogpushJobDatasetDEXApplicationTests LogpushJobDataset = "dex_application_tests"` - `const LogpushJobDatasetDEXDeviceStateEvents LogpushJobDataset = "dex_device_state_events"` - `const LogpushJobDatasetDLPForensicCopies LogpushJobDataset = "dlp_forensic_copies"` - `const LogpushJobDatasetDNSFirewallLogs LogpushJobDataset = "dns_firewall_logs"` - `const LogpushJobDatasetDNSLogs LogpushJobDataset = "dns_logs"` - `const LogpushJobDatasetEmailSecurityAlerts LogpushJobDataset = "email_security_alerts"` - `const LogpushJobDatasetFirewallEvents LogpushJobDataset = "firewall_events"` - `const LogpushJobDatasetGatewayDNS LogpushJobDataset = "gateway_dns"` - `const LogpushJobDatasetGatewayHTTP LogpushJobDataset = "gateway_http"` - `const LogpushJobDatasetGatewayNetwork LogpushJobDataset = "gateway_network"` - `const LogpushJobDatasetHTTPRequests LogpushJobDataset = "http_requests"` - `const LogpushJobDatasetIPSECLogs LogpushJobDataset = "ipsec_logs"` - `const LogpushJobDatasetMagicIDsDetections LogpushJobDataset = "magic_ids_detections"` - `const LogpushJobDatasetNELReports LogpushJobDataset = "nel_reports"` - `const LogpushJobDatasetNetworkAnalyticsLogs LogpushJobDataset = "network_analytics_logs"` - `const LogpushJobDatasetPageShieldEvents LogpushJobDataset = "page_shield_events"` - `const LogpushJobDatasetSinkholeHTTPLogs LogpushJobDataset = "sinkhole_http_logs"` - `const LogpushJobDatasetSpectrumEvents LogpushJobDataset = "spectrum_events"` - `const LogpushJobDatasetSSHLogs LogpushJobDataset = 
"ssh_logs"` - `const LogpushJobDatasetWARPConfigChanges LogpushJobDataset = "warp_config_changes"` - `const LogpushJobDatasetWARPToggleChanges LogpushJobDataset = "warp_toggle_changes"` - `const LogpushJobDatasetWorkersTraceEvents LogpushJobDataset = "workers_trace_events"` - `const LogpushJobDatasetZarazEvents LogpushJobDataset = "zaraz_events"` - `const LogpushJobDatasetZeroTrustNetworkSessions LogpushJobDataset = "zero_trust_network_sessions"` - `DestinationConf string` Uniquely identifies a resource (such as an s3 bucket) where data. will be pushed. Additional configuration parameters supported by the destination may be included. - `Enabled bool` Flag that indicates if the job is enabled. - `ErrorMessage string` If not null, the job is currently failing. Failures are usually. repetitive (example: no permissions to write to destination bucket). Only the last failure is recorded. On successful execution of a job the error_message and last_error are set to null. - `Frequency LogpushJobFrequency` This field is deprecated. Please use `max_upload_*` parameters instead. . The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. - `const LogpushJobFrequencyHigh LogpushJobFrequency = "high"` - `const LogpushJobFrequencyLow LogpushJobFrequency = "low"` - `Kind LogpushJobKind` The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs (when supported by the dataset). - `const LogpushJobKindEmpty LogpushJobKind = ""` - `const LogpushJobKindEdge LogpushJobKind = "edge"` - `LastComplete Time` Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. 
If the job has never run or has just been enabled and hasn't run yet then the field will be empty. - `LastError Time` Records the last time the job failed. If not null, the job is currently. failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field. - `LogpullOptions string` This field is deprecated. Use `output_options` instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately. - `MaxUploadBytes LogpushJobMaxUploadBytes` The maximum uncompressed file size of a batch of logs. This setting value must be between `5 MB` and `1 GB`, or `0` to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. - `type LogpushJobMaxUploadBytes int64` The maximum uncompressed file size of a batch of logs. This setting value must be between `5 MB` and `1 GB`, or `0` to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. - `const LogpushJobMaxUploadBytes0 LogpushJobMaxUploadBytes = 0` - `int64` - `MaxUploadIntervalSeconds LogpushJobMaxUploadIntervalSeconds` The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or `0` to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. - `type LogpushJobMaxUploadIntervalSeconds int64` The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or `0` to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. 
- `const LogpushJobMaxUploadIntervalSeconds0 LogpushJobMaxUploadIntervalSeconds = 0` - `int64` - `MaxUploadRecords LogpushJobMaxUploadRecords` The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or `0` to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. - `type LogpushJobMaxUploadRecords int64` The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or `0` to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. - `const LogpushJobMaxUploadRecords0 LogpushJobMaxUploadRecords = 0` - `int64` - `Name string` Optional human readable job name. Not unique. Cloudflare suggests. that you set this to a meaningful string, like the domain name, to make it easier to identify your job. - `OutputOptions OutputOptions` The structured replacement for `logpull_options`. When including this field, the `logpull_option` field will be ignored. - `BatchPrefix string` String to be prepended before each batch. - `BatchSuffix string` String to be appended after each batch. - `Cve2021_44228 bool` If set to true, will cause all occurrences of `${` in the generated files to be replaced with `x{`. - `FieldDelimiter string` String to join fields. This field be ignored when `record_template` is set. - `FieldNames []string` List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the fields names you are interested in. - `MergeSubrequests bool` If set to true, subrequests will be merged into the parent request. Only supported for the `http_requests` dataset. - `OutputType OutputOptionsOutputType` Specifies the output type, such as `ndjson` or `csv`. 
This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, are different between output types. - `const OutputOptionsOutputTypeNdjson OutputOptionsOutputType = "ndjson"` - `const OutputOptionsOutputTypeCsv OutputOptionsOutputType = "csv"` - `RecordDelimiter string` String to be inserted in-between the records as separator. - `RecordPrefix string` String to be prepended before each record. - `RecordSuffix string` String to be appended after each record. - `RecordTemplate string` String to use as template for each record instead of the default json key value mapping. All fields used in the template must be present in `field_names` as well, otherwise they will end up as null. Format as a Go `text/template` without any standard functions, like conditionals, loops, sub-templates, etc. - `SampleRate float64` Floating number to specify sampling rate. Sampling is applied on top of filtering, and regardless of the current `sample_interval` of the data. - `TimestampFormat OutputOptionsTimestampFormat` String to specify the format for timestamps, such as `unixnano`, `unix`, `rfc3339`, `rfc3339ms` or `rfc3339ns`. 
- `const OutputOptionsTimestampFormatUnixnano OutputOptionsTimestampFormat = "unixnano"` - `const OutputOptionsTimestampFormatUnix OutputOptionsTimestampFormat = "unix"` - `const OutputOptionsTimestampFormatRfc3339 OutputOptionsTimestampFormat = "rfc3339"` - `const OutputOptionsTimestampFormatRfc3339ms OutputOptionsTimestampFormat = "rfc3339ms"` - `const OutputOptionsTimestampFormatRfc3339ns OutputOptionsTimestampFormat = "rfc3339ns"` ### Example ```go package main import ( "context" "fmt" "github.com/cloudflare/cloudflare-go" "github.com/cloudflare/cloudflare-go/logpush" "github.com/cloudflare/cloudflare-go/option" ) func main() { client := cloudflare.NewClient( option.WithAPIToken("Sn3lZJTBX6kkg7OdcBUAxOO963GEIyGQqnFTOFYY"), ) page, err := client.Logpush.Jobs.List(context.TODO(), logpush.JobListParams{ }) if err != nil { panic(err.Error()) } fmt.Printf("%+v\n", page) } ``` #### Response ```json { "errors": [], "messages": [], "result": [ { "dataset": "gateway_dns", "destination_conf": "s3://mybucket/logs?region=us-west-2", "enabled": false, "error_message": null, "filter": "{\"where\":{\"and\":[{\"key\":\"ClientRequestPath\",\"operator\":\"contains\",\"value\":\"/static\"},{\"key\":\"ClientRequestHost\",\"operator\":\"eq\",\"value\":\"example.com\"}]}}", "id": 1, "kind": "", "last_complete": null, "last_error": null, "max_upload_bytes": 5000000, "max_upload_interval_seconds": 30, "max_upload_records": 1000, "name": "example.com", "output_options": { "CVE-2021-44228": false, "batch_prefix": "", "batch_suffix": "", "field_delimiter": ",", "field_names": [ "Datetime", "DstIP", "SrcIP" ], "output_type": "ndjson", "record_delimiter": "", "record_prefix": "{", "record_suffix": "}\n", "sample_rate": 1, "timestamp_format": "unixnano" } } ], "success": true } ``` ## Get Logpush job details `client.Logpush.Jobs.Get(ctx, jobID, query) (*LogpushJob, error)` **get** `/{accounts_or_zones}/{account_or_zone_id}/logpush/jobs/{job_id}` Gets the details of a Logpush job. 
### Parameters - `jobID int64` Unique id of the job. - `query JobGetParams` - `AccountID param.Field[string]` The Account ID to use for this endpoint. Mutually exclusive with the Zone ID. - `ZoneID param.Field[string]` The Zone ID to use for this endpoint. Mutually exclusive with the Account ID. ### Returns - `type LogpushJob struct{…}` - `ID int64` Unique id of the job. - `Dataset LogpushJobDataset` Name of the dataset. A list of supported datasets can be found on the [Developer Docs](https://developers.cloudflare.com/logs/reference/log-fields/). - `const LogpushJobDatasetAccessRequests LogpushJobDataset = "access_requests"` - `const LogpushJobDatasetAuditLogs LogpushJobDataset = "audit_logs"` - `const LogpushJobDatasetAuditLogsV2 LogpushJobDataset = "audit_logs_v2"` - `const LogpushJobDatasetBISOUserActions LogpushJobDataset = "biso_user_actions"` - `const LogpushJobDatasetCasbFindings LogpushJobDataset = "casb_findings"` - `const LogpushJobDatasetDevicePostureResults LogpushJobDataset = "device_posture_results"` - `const LogpushJobDatasetDEXApplicationTests LogpushJobDataset = "dex_application_tests"` - `const LogpushJobDatasetDEXDeviceStateEvents LogpushJobDataset = "dex_device_state_events"` - `const LogpushJobDatasetDLPForensicCopies LogpushJobDataset = "dlp_forensic_copies"` - `const LogpushJobDatasetDNSFirewallLogs LogpushJobDataset = "dns_firewall_logs"` - `const LogpushJobDatasetDNSLogs LogpushJobDataset = "dns_logs"` - `const LogpushJobDatasetEmailSecurityAlerts LogpushJobDataset = "email_security_alerts"` - `const LogpushJobDatasetFirewallEvents LogpushJobDataset = "firewall_events"` - `const LogpushJobDatasetGatewayDNS LogpushJobDataset = "gateway_dns"` - `const LogpushJobDatasetGatewayHTTP LogpushJobDataset = "gateway_http"` - `const LogpushJobDatasetGatewayNetwork LogpushJobDataset = "gateway_network"` - `const LogpushJobDatasetHTTPRequests LogpushJobDataset = "http_requests"` - `const LogpushJobDatasetIPSECLogs LogpushJobDataset = "ipsec_logs"` - 
`const LogpushJobDatasetMagicIDsDetections LogpushJobDataset = "magic_ids_detections"` - `const LogpushJobDatasetNELReports LogpushJobDataset = "nel_reports"` - `const LogpushJobDatasetNetworkAnalyticsLogs LogpushJobDataset = "network_analytics_logs"` - `const LogpushJobDatasetPageShieldEvents LogpushJobDataset = "page_shield_events"` - `const LogpushJobDatasetSinkholeHTTPLogs LogpushJobDataset = "sinkhole_http_logs"` - `const LogpushJobDatasetSpectrumEvents LogpushJobDataset = "spectrum_events"` - `const LogpushJobDatasetSSHLogs LogpushJobDataset = "ssh_logs"` - `const LogpushJobDatasetWARPConfigChanges LogpushJobDataset = "warp_config_changes"` - `const LogpushJobDatasetWARPToggleChanges LogpushJobDataset = "warp_toggle_changes"` - `const LogpushJobDatasetWorkersTraceEvents LogpushJobDataset = "workers_trace_events"` - `const LogpushJobDatasetZarazEvents LogpushJobDataset = "zaraz_events"` - `const LogpushJobDatasetZeroTrustNetworkSessions LogpushJobDataset = "zero_trust_network_sessions"` - `DestinationConf string` Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included. - `Enabled bool` Flag that indicates if the job is enabled. - `ErrorMessage string` If not null, the job is currently failing. Failures are usually repetitive (example: no permissions to write to destination bucket). Only the last failure is recorded. On successful execution of a job the error_message and last_error are set to null. - `Frequency LogpushJobFrequency` This field is deprecated. Please use `max_upload_*` parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. 
- `const LogpushJobFrequencyHigh LogpushJobFrequency = "high"` - `const LogpushJobFrequencyLow LogpushJobFrequency = "low"` - `Kind LogpushJobKind` The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs (when supported by the dataset). - `const LogpushJobKindEmpty LogpushJobKind = ""` - `const LogpushJobKindEdge LogpushJobKind = "edge"` - `LastComplete Time` Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. If the job has never run or has just been enabled and hasn't run yet then the field will be empty. - `LastError Time` Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field. - `LogpullOptions string` This field is deprecated. Use `output_options` instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately. - `MaxUploadBytes LogpushJobMaxUploadBytes` The maximum uncompressed file size of a batch of logs. This setting value must be between `5 MB` and `1 GB`, or `0` to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. - `type LogpushJobMaxUploadBytes int64` The maximum uncompressed file size of a batch of logs. This setting value must be between `5 MB` and `1 GB`, or `0` to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. 
- `const LogpushJobMaxUploadBytes0 LogpushJobMaxUploadBytes = 0` - `int64` - `MaxUploadIntervalSeconds LogpushJobMaxUploadIntervalSeconds` The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or `0` to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. - `type LogpushJobMaxUploadIntervalSeconds int64` The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or `0` to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. - `const LogpushJobMaxUploadIntervalSeconds0 LogpushJobMaxUploadIntervalSeconds = 0` - `int64` - `MaxUploadRecords LogpushJobMaxUploadRecords` The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or `0` to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. - `type LogpushJobMaxUploadRecords int64` The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or `0` to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. - `const LogpushJobMaxUploadRecords0 LogpushJobMaxUploadRecords = 0` - `int64` - `Name string` Optional human readable job name. Not unique. Cloudflare suggests. that you set this to a meaningful string, like the domain name, to make it easier to identify your job. - `OutputOptions OutputOptions` The structured replacement for `logpull_options`. When including this field, the `logpull_option` field will be ignored. - `BatchPrefix string` String to be prepended before each batch. - `BatchSuffix string` String to be appended after each batch. 
- `Cve2021_44228 bool` If set to true, will cause all occurrences of `${` in the generated files to be replaced with `x{`. - `FieldDelimiter string` String to join fields. This field will be ignored when `record_template` is set. - `FieldNames []string` List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the field names you are interested in. - `MergeSubrequests bool` If set to true, subrequests will be merged into the parent request. Only supported for the `http_requests` dataset. - `OutputType OutputOptionsOutputType` Specifies the output type, such as `ndjson` or `csv`. This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, are different between output types. - `const OutputOptionsOutputTypeNdjson OutputOptionsOutputType = "ndjson"` - `const OutputOptionsOutputTypeCsv OutputOptionsOutputType = "csv"` - `RecordDelimiter string` String to be inserted in-between the records as separator. - `RecordPrefix string` String to be prepended before each record. - `RecordSuffix string` String to be appended after each record. - `RecordTemplate string` String to use as template for each record instead of the default json key value mapping. All fields used in the template must be present in `field_names` as well, otherwise they will end up as null. Format as a Go `text/template` without any standard functions, like conditionals, loops, sub-templates, etc. - `SampleRate float64` Floating number to specify sampling rate. Sampling is applied on top of filtering, and regardless of the current `sample_interval` of the data. - `TimestampFormat OutputOptionsTimestampFormat` String to specify the format for timestamps, such as `unixnano`, `unix`, `rfc3339`, `rfc3339ms` or `rfc3339ns`. 
- `const OutputOptionsTimestampFormatUnixnano OutputOptionsTimestampFormat = "unixnano"` - `const OutputOptionsTimestampFormatUnix OutputOptionsTimestampFormat = "unix"` - `const OutputOptionsTimestampFormatRfc3339 OutputOptionsTimestampFormat = "rfc3339"` - `const OutputOptionsTimestampFormatRfc3339ms OutputOptionsTimestampFormat = "rfc3339ms"` - `const OutputOptionsTimestampFormatRfc3339ns OutputOptionsTimestampFormat = "rfc3339ns"` ### Example ```go package main import ( "context" "fmt" "github.com/cloudflare/cloudflare-go" "github.com/cloudflare/cloudflare-go/logpush" "github.com/cloudflare/cloudflare-go/option" ) func main() { client := cloudflare.NewClient( option.WithAPIToken("Sn3lZJTBX6kkg7OdcBUAxOO963GEIyGQqnFTOFYY"), ) logpushJob, err := client.Logpush.Jobs.Get( context.TODO(), int64(1), logpush.JobGetParams{ }, ) if err != nil { panic(err.Error()) } fmt.Printf("%+v\n", logpushJob.ID) } ``` #### Response ```json { "errors": [], "messages": [], "result": { "dataset": "gateway_dns", "destination_conf": "s3://mybucket/logs?region=us-west-2", "enabled": false, "error_message": null, "filter": "{\"where\":{\"and\":[{\"key\":\"ClientRequestPath\",\"operator\":\"contains\",\"value\":\"/static\"},{\"key\":\"ClientRequestHost\",\"operator\":\"eq\",\"value\":\"example.com\"}]}}", "id": 1, "kind": "", "last_complete": null, "last_error": null, "max_upload_bytes": 5000000, "max_upload_interval_seconds": 30, "max_upload_records": 1000, "name": "example.com", "output_options": { "CVE-2021-44228": false, "batch_prefix": "", "batch_suffix": "", "field_delimiter": ",", "field_names": [ "Datetime", "DstIP", "SrcIP" ], "output_type": "ndjson", "record_delimiter": "", "record_prefix": "{", "record_suffix": "}\n", "sample_rate": 1, "timestamp_format": "unixnano" } }, "success": true } ``` ## Create Logpush job `client.Logpush.Jobs.New(ctx, params) (*LogpushJob, error)` **post** `/{accounts_or_zones}/{account_or_zone_id}/logpush/jobs` Creates a new Logpush job for an account 
or zone. ### Parameters - `params JobNewParams` - `DestinationConf param.Field[string]` Body param: Uniquely identifies a resource (such as an s3 bucket) where data. will be pushed. Additional configuration parameters supported by the destination may be included. - `AccountID param.Field[string]` Path param: The Account ID to use for this endpoint. Mutually exclusive with the Zone ID. - `ZoneID param.Field[string]` Path param: The Zone ID to use for this endpoint. Mutually exclusive with the Account ID. - `Dataset param.Field[JobNewParamsDataset]` Body param: Name of the dataset. A list of supported datasets can be found on the [Developer Docs](https://developers.cloudflare.com/logs/reference/log-fields/). - `const JobNewParamsDatasetAccessRequests JobNewParamsDataset = "access_requests"` - `const JobNewParamsDatasetAuditLogs JobNewParamsDataset = "audit_logs"` - `const JobNewParamsDatasetAuditLogsV2 JobNewParamsDataset = "audit_logs_v2"` - `const JobNewParamsDatasetBISOUserActions JobNewParamsDataset = "biso_user_actions"` - `const JobNewParamsDatasetCasbFindings JobNewParamsDataset = "casb_findings"` - `const JobNewParamsDatasetDevicePostureResults JobNewParamsDataset = "device_posture_results"` - `const JobNewParamsDatasetDEXApplicationTests JobNewParamsDataset = "dex_application_tests"` - `const JobNewParamsDatasetDEXDeviceStateEvents JobNewParamsDataset = "dex_device_state_events"` - `const JobNewParamsDatasetDLPForensicCopies JobNewParamsDataset = "dlp_forensic_copies"` - `const JobNewParamsDatasetDNSFirewallLogs JobNewParamsDataset = "dns_firewall_logs"` - `const JobNewParamsDatasetDNSLogs JobNewParamsDataset = "dns_logs"` - `const JobNewParamsDatasetEmailSecurityAlerts JobNewParamsDataset = "email_security_alerts"` - `const JobNewParamsDatasetFirewallEvents JobNewParamsDataset = "firewall_events"` - `const JobNewParamsDatasetGatewayDNS JobNewParamsDataset = "gateway_dns"` - `const JobNewParamsDatasetGatewayHTTP JobNewParamsDataset = "gateway_http"` - `const 
JobNewParamsDatasetGatewayNetwork JobNewParamsDataset = "gateway_network"` - `const JobNewParamsDatasetHTTPRequests JobNewParamsDataset = "http_requests"` - `const JobNewParamsDatasetIPSECLogs JobNewParamsDataset = "ipsec_logs"` - `const JobNewParamsDatasetMagicIDsDetections JobNewParamsDataset = "magic_ids_detections"` - `const JobNewParamsDatasetNELReports JobNewParamsDataset = "nel_reports"` - `const JobNewParamsDatasetNetworkAnalyticsLogs JobNewParamsDataset = "network_analytics_logs"` - `const JobNewParamsDatasetPageShieldEvents JobNewParamsDataset = "page_shield_events"` - `const JobNewParamsDatasetSinkholeHTTPLogs JobNewParamsDataset = "sinkhole_http_logs"` - `const JobNewParamsDatasetSpectrumEvents JobNewParamsDataset = "spectrum_events"` - `const JobNewParamsDatasetSSHLogs JobNewParamsDataset = "ssh_logs"` - `const JobNewParamsDatasetWARPConfigChanges JobNewParamsDataset = "warp_config_changes"` - `const JobNewParamsDatasetWARPToggleChanges JobNewParamsDataset = "warp_toggle_changes"` - `const JobNewParamsDatasetWorkersTraceEvents JobNewParamsDataset = "workers_trace_events"` - `const JobNewParamsDatasetZarazEvents JobNewParamsDataset = "zaraz_events"` - `const JobNewParamsDatasetZeroTrustNetworkSessions JobNewParamsDataset = "zero_trust_network_sessions"` - `Enabled param.Field[bool]` Body param: Flag that indicates if the job is enabled. - `Filter param.Field[string]` Body param: The filters to select the events to include and/or remove from your logs. For more information, refer to [Filters](https://developers.cloudflare.com/logs/reference/filters/). - `Frequency param.Field[JobNewParamsFrequency]` Body param: This field is deprecated. Please use `max_upload_*` parameters instead. . The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. 
- `const JobNewParamsFrequencyHigh JobNewParamsFrequency = "high"` - `const JobNewParamsFrequencyLow JobNewParamsFrequency = "low"` - `Kind param.Field[JobNewParamsKind]` Body param: The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs (when supported by the dataset). - `const JobNewParamsKindEmpty JobNewParamsKind = ""` - `const JobNewParamsKindEdge JobNewParamsKind = "edge"` - `LogpullOptions param.Field[string]` Body param: This field is deprecated. Use `output_options` instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately. - `MaxUploadBytes param.Field[JobNewParamsMaxUploadBytes]` Body param: The maximum uncompressed file size of a batch of logs. This setting value must be between `5 MB` and `1 GB`, or `0` to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. - `type JobNewParamsMaxUploadBytes int64` The maximum uncompressed file size of a batch of logs. This setting value must be between `5 MB` and `1 GB`, or `0` to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. - `const JobNewParamsMaxUploadBytes0 JobNewParamsMaxUploadBytes = 0` - `int64` - `MaxUploadIntervalSeconds param.Field[JobNewParamsMaxUploadIntervalSeconds]` Body param: The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or `0` to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. - `type JobNewParamsMaxUploadIntervalSeconds int64` The maximum interval in seconds for log batches. 
This setting must be between 30 and 300 seconds (5 minutes), or `0` to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. - `const JobNewParamsMaxUploadIntervalSeconds0 JobNewParamsMaxUploadIntervalSeconds = 0` - `int64` - `MaxUploadRecords param.Field[JobNewParamsMaxUploadRecords]` Body param: The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or `0` to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. - `type JobNewParamsMaxUploadRecords int64` The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or `0` to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. - `const JobNewParamsMaxUploadRecords0 JobNewParamsMaxUploadRecords = 0` - `int64` - `Name param.Field[string]` Body param: Optional human readable job name. Not unique. Cloudflare suggests. that you set this to a meaningful string, like the domain name, to make it easier to identify your job. - `OutputOptions param.Field[OutputOptions]` Body param: The structured replacement for `logpull_options`. When including this field, the `logpull_option` field will be ignored. - `OwnershipChallenge param.Field[string]` Body param: Ownership challenge token to prove destination ownership. ### Returns - `type LogpushJob struct{…}` - `ID int64` Unique id of the job. - `Dataset LogpushJobDataset` Name of the dataset. A list of supported datasets can be found on the [Developer Docs](https://developers.cloudflare.com/logs/reference/log-fields/). 
- `const LogpushJobDatasetAccessRequests LogpushJobDataset = "access_requests"` - `const LogpushJobDatasetAuditLogs LogpushJobDataset = "audit_logs"` - `const LogpushJobDatasetAuditLogsV2 LogpushJobDataset = "audit_logs_v2"` - `const LogpushJobDatasetBISOUserActions LogpushJobDataset = "biso_user_actions"` - `const LogpushJobDatasetCasbFindings LogpushJobDataset = "casb_findings"` - `const LogpushJobDatasetDevicePostureResults LogpushJobDataset = "device_posture_results"` - `const LogpushJobDatasetDEXApplicationTests LogpushJobDataset = "dex_application_tests"` - `const LogpushJobDatasetDEXDeviceStateEvents LogpushJobDataset = "dex_device_state_events"` - `const LogpushJobDatasetDLPForensicCopies LogpushJobDataset = "dlp_forensic_copies"` - `const LogpushJobDatasetDNSFirewallLogs LogpushJobDataset = "dns_firewall_logs"` - `const LogpushJobDatasetDNSLogs LogpushJobDataset = "dns_logs"` - `const LogpushJobDatasetEmailSecurityAlerts LogpushJobDataset = "email_security_alerts"` - `const LogpushJobDatasetFirewallEvents LogpushJobDataset = "firewall_events"` - `const LogpushJobDatasetGatewayDNS LogpushJobDataset = "gateway_dns"` - `const LogpushJobDatasetGatewayHTTP LogpushJobDataset = "gateway_http"` - `const LogpushJobDatasetGatewayNetwork LogpushJobDataset = "gateway_network"` - `const LogpushJobDatasetHTTPRequests LogpushJobDataset = "http_requests"` - `const LogpushJobDatasetIPSECLogs LogpushJobDataset = "ipsec_logs"` - `const LogpushJobDatasetMagicIDsDetections LogpushJobDataset = "magic_ids_detections"` - `const LogpushJobDatasetNELReports LogpushJobDataset = "nel_reports"` - `const LogpushJobDatasetNetworkAnalyticsLogs LogpushJobDataset = "network_analytics_logs"` - `const LogpushJobDatasetPageShieldEvents LogpushJobDataset = "page_shield_events"` - `const LogpushJobDatasetSinkholeHTTPLogs LogpushJobDataset = "sinkhole_http_logs"` - `const LogpushJobDatasetSpectrumEvents LogpushJobDataset = "spectrum_events"` - `const LogpushJobDatasetSSHLogs LogpushJobDataset = 
"ssh_logs"` - `const LogpushJobDatasetWARPConfigChanges LogpushJobDataset = "warp_config_changes"` - `const LogpushJobDatasetWARPToggleChanges LogpushJobDataset = "warp_toggle_changes"` - `const LogpushJobDatasetWorkersTraceEvents LogpushJobDataset = "workers_trace_events"` - `const LogpushJobDatasetZarazEvents LogpushJobDataset = "zaraz_events"` - `const LogpushJobDatasetZeroTrustNetworkSessions LogpushJobDataset = "zero_trust_network_sessions"` - `DestinationConf string` Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included. - `Enabled bool` Flag that indicates if the job is enabled. - `ErrorMessage string` If not null, the job is currently failing. Failures are usually repetitive (example: no permissions to write to destination bucket). Only the last failure is recorded. On successful execution of a job the error_message and last_error are set to null. - `Frequency LogpushJobFrequency` This field is deprecated. Please use `max_upload_*` parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. - `const LogpushJobFrequencyHigh LogpushJobFrequency = "high"` - `const LogpushJobFrequencyLow LogpushJobFrequency = "low"` - `Kind LogpushJobKind` The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs (when supported by the dataset). - `const LogpushJobKindEmpty LogpushJobKind = ""` - `const LogpushJobKindEdge LogpushJobKind = "edge"` - `LastComplete Time` Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. 
If the job has never run or has just been enabled and hasn't run yet then the field will be empty. - `LastError Time` Records the last time the job failed. If not null, the job is currently failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field. - `LogpullOptions string` This field is deprecated. Use `output_options` instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately. - `MaxUploadBytes LogpushJobMaxUploadBytes` The maximum uncompressed file size of a batch of logs. This setting value must be between `5 MB` and `1 GB`, or `0` to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. - `type LogpushJobMaxUploadBytes int64` The maximum uncompressed file size of a batch of logs. This setting value must be between `5 MB` and `1 GB`, or `0` to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. - `const LogpushJobMaxUploadBytes0 LogpushJobMaxUploadBytes = 0` - `int64` - `MaxUploadIntervalSeconds LogpushJobMaxUploadIntervalSeconds` The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or `0` to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. - `type LogpushJobMaxUploadIntervalSeconds int64` The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or `0` to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. 
- `const LogpushJobMaxUploadIntervalSeconds0 LogpushJobMaxUploadIntervalSeconds = 0` - `int64` - `MaxUploadRecords LogpushJobMaxUploadRecords` The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or `0` to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. - `type LogpushJobMaxUploadRecords int64` The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or `0` to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. - `const LogpushJobMaxUploadRecords0 LogpushJobMaxUploadRecords = 0` - `int64` - `Name string` Optional human readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job. - `OutputOptions OutputOptions` The structured replacement for `logpull_options`. When including this field, the `logpull_options` field will be ignored. - `BatchPrefix string` String to be prepended before each batch. - `BatchSuffix string` String to be appended after each batch. - `Cve2021_44228 bool` If set to true, will cause all occurrences of `${` in the generated files to be replaced with `x{`. - `FieldDelimiter string` String to join fields. This field will be ignored when `record_template` is set. - `FieldNames []string` List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the field names you are interested in. - `MergeSubrequests bool` If set to true, subrequests will be merged into the parent request. Only supported for the `http_requests` dataset. - `OutputType OutputOptionsOutputType` Specifies the output type, such as `ndjson` or `csv`. 
This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, are different between output types. - `const OutputOptionsOutputTypeNdjson OutputOptionsOutputType = "ndjson"` - `const OutputOptionsOutputTypeCsv OutputOptionsOutputType = "csv"` - `RecordDelimiter string` String to be inserted in-between the records as separator. - `RecordPrefix string` String to be prepended before each record. - `RecordSuffix string` String to be appended after each record. - `RecordTemplate string` String to use as template for each record instead of the default json key value mapping. All fields used in the template must be present in `field_names` as well, otherwise they will end up as null. Format as a Go `text/template` without any standard functions, like conditionals, loops, sub-templates, etc. - `SampleRate float64` Floating number to specify sampling rate. Sampling is applied on top of filtering, and regardless of the current `sample_interval` of the data. - `TimestampFormat OutputOptionsTimestampFormat` String to specify the format for timestamps, such as `unixnano`, `unix`, `rfc3339`, `rfc3339ms` or `rfc3339ns`. 
- `const OutputOptionsTimestampFormatUnixnano OutputOptionsTimestampFormat = "unixnano"` - `const OutputOptionsTimestampFormatUnix OutputOptionsTimestampFormat = "unix"` - `const OutputOptionsTimestampFormatRfc3339 OutputOptionsTimestampFormat = "rfc3339"` - `const OutputOptionsTimestampFormatRfc3339ms OutputOptionsTimestampFormat = "rfc3339ms"` - `const OutputOptionsTimestampFormatRfc3339ns OutputOptionsTimestampFormat = "rfc3339ns"` ### Example ```go package main import ( "context" "fmt" "github.com/cloudflare/cloudflare-go" "github.com/cloudflare/cloudflare-go/logpush" "github.com/cloudflare/cloudflare-go/option" ) func main() { client := cloudflare.NewClient( option.WithAPIToken("Sn3lZJTBX6kkg7OdcBUAxOO963GEIyGQqnFTOFYY"), ) logpushJob, err := client.Logpush.Jobs.New(context.TODO(), logpush.JobNewParams{ DestinationConf: cloudflare.F("s3://mybucket/logs?region=us-west-2"), Dataset: cloudflare.F(logpush.JobNewParamsDatasetGatewayDNS), Filter: cloudflare.F(`{"where":{"and":[{"key":"ClientRequestPath","operator":"contains","value":"/static"},{"key":"ClientRequestHost","operator":"eq","value":"example.com"}]}}`), MaxUploadBytes: cloudflare.F(logpush.JobNewParamsMaxUploadBytes(5000000)), MaxUploadIntervalSeconds: cloudflare.F(logpush.JobNewParamsMaxUploadIntervalSeconds(30)), MaxUploadRecords: cloudflare.F(logpush.JobNewParamsMaxUploadRecords(1000)), Name: cloudflare.F("example.com"), OutputOptions: cloudflare.F(logpush.OutputOptionsParam{ Cve2021_44228: cloudflare.F(false), BatchPrefix: cloudflare.F(""), BatchSuffix: cloudflare.F(""), FieldDelimiter: cloudflare.F(","), FieldNames: cloudflare.F([]string{"Datetime", "DstIP", "SrcIP"}), OutputType: cloudflare.F(logpush.OutputOptionsOutputTypeNdjson), RecordDelimiter: cloudflare.F(""), RecordPrefix: cloudflare.F("{"), RecordSuffix: cloudflare.F("}\n"), SampleRate: cloudflare.F(1.000000), TimestampFormat: cloudflare.F(logpush.OutputOptionsTimestampFormatUnixnano), }), OwnershipChallenge: 
cloudflare.F("00000000000000000000"), }) if err != nil { panic(err.Error()) } fmt.Printf("%+v\n", logpushJob.ID) } ``` #### Response ```json { "errors": [], "messages": [], "result": { "dataset": "gateway_dns", "destination_conf": "s3://mybucket/logs?region=us-west-2", "enabled": false, "error_message": null, "filter": "{\"where\":{\"and\":[{\"key\":\"ClientRequestPath\",\"operator\":\"contains\",\"value\":\"/static\"},{\"key\":\"ClientRequestHost\",\"operator\":\"eq\",\"value\":\"example.com\"}]}}", "id": 1, "kind": "", "last_complete": null, "last_error": null, "max_upload_bytes": 5000000, "max_upload_interval_seconds": 30, "max_upload_records": 1000, "name": "example.com", "output_options": { "CVE-2021-44228": false, "batch_prefix": "", "batch_suffix": "", "field_delimiter": ",", "field_names": [ "Datetime", "DstIP", "SrcIP" ], "output_type": "ndjson", "record_delimiter": "", "record_prefix": "{", "record_suffix": "}\n", "sample_rate": 1, "timestamp_format": "unixnano" } }, "success": true } ``` ## Update Logpush job `client.Logpush.Jobs.Update(ctx, jobID, params) (*LogpushJob, error)` **put** `/{accounts_or_zones}/{account_or_zone_id}/logpush/jobs/{job_id}` Updates a Logpush job. ### Parameters - `jobID int64` Unique id of the job. - `params JobUpdateParams` - `AccountID param.Field[string]` Path param: The Account ID to use for this endpoint. Mutually exclusive with the Zone ID. - `ZoneID param.Field[string]` Path param: The Zone ID to use for this endpoint. Mutually exclusive with the Account ID. - `DestinationConf param.Field[string]` Body param: Uniquely identifies a resource (such as an s3 bucket) where data. will be pushed. Additional configuration parameters supported by the destination may be included. - `Enabled param.Field[bool]` Body param: Flag that indicates if the job is enabled. - `Filter param.Field[string]` Body param: The filters to select the events to include and/or remove from your logs. 
For more information, refer to [Filters](https://developers.cloudflare.com/logs/reference/filters/). - `Frequency param.Field[JobUpdateParamsFrequency]` Body param: This field is deprecated. Please use `max_upload_*` parameters instead. The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. - `const JobUpdateParamsFrequencyHigh JobUpdateParamsFrequency = "high"` - `const JobUpdateParamsFrequencyLow JobUpdateParamsFrequency = "low"` - `Kind param.Field[JobUpdateParamsKind]` Body param: The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs (when supported by the dataset). - `const JobUpdateParamsKindEmpty JobUpdateParamsKind = ""` - `const JobUpdateParamsKindEdge JobUpdateParamsKind = "edge"` - `LogpullOptions param.Field[string]` Body param: This field is deprecated. Use `output_options` instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately. - `MaxUploadBytes param.Field[JobUpdateParamsMaxUploadBytes]` Body param: The maximum uncompressed file size of a batch of logs. This setting value must be between `5 MB` and `1 GB`, or `0` to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. - `type JobUpdateParamsMaxUploadBytes int64` The maximum uncompressed file size of a batch of logs. This setting value must be between `5 MB` and `1 GB`, or `0` to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. 
- `const JobUpdateParamsMaxUploadBytes0 JobUpdateParamsMaxUploadBytes = 0` - `int64` - `MaxUploadIntervalSeconds param.Field[JobUpdateParamsMaxUploadIntervalSeconds]` Body param: The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or `0` to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. - `type JobUpdateParamsMaxUploadIntervalSeconds int64` The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or `0` to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. - `const JobUpdateParamsMaxUploadIntervalSeconds0 JobUpdateParamsMaxUploadIntervalSeconds = 0` - `int64` - `MaxUploadRecords param.Field[JobUpdateParamsMaxUploadRecords]` Body param: The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or `0` to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. - `type JobUpdateParamsMaxUploadRecords int64` The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or `0` to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. - `const JobUpdateParamsMaxUploadRecords0 JobUpdateParamsMaxUploadRecords = 0` - `int64` - `Name param.Field[string]` Body param: Optional human readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job. - `OutputOptions param.Field[OutputOptions]` Body param: The structured replacement for `logpull_options`. When including this field, the `logpull_option` field will be ignored. 
- `OwnershipChallenge param.Field[string]` Body param: Ownership challenge token to prove destination ownership. ### Returns - `type LogpushJob struct{…}` - `ID int64` Unique id of the job. - `Dataset LogpushJobDataset` Name of the dataset. A list of supported datasets can be found on the [Developer Docs](https://developers.cloudflare.com/logs/reference/log-fields/). - `const LogpushJobDatasetAccessRequests LogpushJobDataset = "access_requests"` - `const LogpushJobDatasetAuditLogs LogpushJobDataset = "audit_logs"` - `const LogpushJobDatasetAuditLogsV2 LogpushJobDataset = "audit_logs_v2"` - `const LogpushJobDatasetBISOUserActions LogpushJobDataset = "biso_user_actions"` - `const LogpushJobDatasetCasbFindings LogpushJobDataset = "casb_findings"` - `const LogpushJobDatasetDevicePostureResults LogpushJobDataset = "device_posture_results"` - `const LogpushJobDatasetDEXApplicationTests LogpushJobDataset = "dex_application_tests"` - `const LogpushJobDatasetDEXDeviceStateEvents LogpushJobDataset = "dex_device_state_events"` - `const LogpushJobDatasetDLPForensicCopies LogpushJobDataset = "dlp_forensic_copies"` - `const LogpushJobDatasetDNSFirewallLogs LogpushJobDataset = "dns_firewall_logs"` - `const LogpushJobDatasetDNSLogs LogpushJobDataset = "dns_logs"` - `const LogpushJobDatasetEmailSecurityAlerts LogpushJobDataset = "email_security_alerts"` - `const LogpushJobDatasetFirewallEvents LogpushJobDataset = "firewall_events"` - `const LogpushJobDatasetGatewayDNS LogpushJobDataset = "gateway_dns"` - `const LogpushJobDatasetGatewayHTTP LogpushJobDataset = "gateway_http"` - `const LogpushJobDatasetGatewayNetwork LogpushJobDataset = "gateway_network"` - `const LogpushJobDatasetHTTPRequests LogpushJobDataset = "http_requests"` - `const LogpushJobDatasetIPSECLogs LogpushJobDataset = "ipsec_logs"` - `const LogpushJobDatasetMagicIDsDetections LogpushJobDataset = "magic_ids_detections"` - `const LogpushJobDatasetNELReports LogpushJobDataset = "nel_reports"` - `const 
LogpushJobDatasetNetworkAnalyticsLogs LogpushJobDataset = "network_analytics_logs"` - `const LogpushJobDatasetPageShieldEvents LogpushJobDataset = "page_shield_events"` - `const LogpushJobDatasetSinkholeHTTPLogs LogpushJobDataset = "sinkhole_http_logs"` - `const LogpushJobDatasetSpectrumEvents LogpushJobDataset = "spectrum_events"` - `const LogpushJobDatasetSSHLogs LogpushJobDataset = "ssh_logs"` - `const LogpushJobDatasetWARPConfigChanges LogpushJobDataset = "warp_config_changes"` - `const LogpushJobDatasetWARPToggleChanges LogpushJobDataset = "warp_toggle_changes"` - `const LogpushJobDatasetWorkersTraceEvents LogpushJobDataset = "workers_trace_events"` - `const LogpushJobDatasetZarazEvents LogpushJobDataset = "zaraz_events"` - `const LogpushJobDatasetZeroTrustNetworkSessions LogpushJobDataset = "zero_trust_network_sessions"` - `DestinationConf string` Uniquely identifies a resource (such as an s3 bucket) where data. will be pushed. Additional configuration parameters supported by the destination may be included. - `Enabled bool` Flag that indicates if the job is enabled. - `ErrorMessage string` If not null, the job is currently failing. Failures are usually. repetitive (example: no permissions to write to destination bucket). Only the last failure is recorded. On successful execution of a job the error_message and last_error are set to null. - `Frequency LogpushJobFrequency` This field is deprecated. Please use `max_upload_*` parameters instead. . The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. - `const LogpushJobFrequencyHigh LogpushJobFrequency = "high"` - `const LogpushJobFrequencyLow LogpushJobFrequency = "low"` - `Kind LogpushJobKind` The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs (when supported by the dataset). 
- `const LogpushJobKindEmpty LogpushJobKind = ""` - `const LogpushJobKindEdge LogpushJobKind = "edge"` - `LastComplete Time` Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. If the job has never run or has just been enabled and hasn't run yet then the field will be empty. - `LastError Time` Records the last time the job failed. If not null, the job is currently. failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field. - `LogpullOptions string` This field is deprecated. Use `output_options` instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately. - `MaxUploadBytes LogpushJobMaxUploadBytes` The maximum uncompressed file size of a batch of logs. This setting value must be between `5 MB` and `1 GB`, or `0` to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. - `type LogpushJobMaxUploadBytes int64` The maximum uncompressed file size of a batch of logs. This setting value must be between `5 MB` and `1 GB`, or `0` to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. - `const LogpushJobMaxUploadBytes0 LogpushJobMaxUploadBytes = 0` - `int64` - `MaxUploadIntervalSeconds LogpushJobMaxUploadIntervalSeconds` The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or `0` to disable it. 
Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. - `type LogpushJobMaxUploadIntervalSeconds int64` The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or `0` to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. - `const LogpushJobMaxUploadIntervalSeconds0 LogpushJobMaxUploadIntervalSeconds = 0` - `int64` - `MaxUploadRecords LogpushJobMaxUploadRecords` The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or `0` to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. - `type LogpushJobMaxUploadRecords int64` The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or `0` to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. - `const LogpushJobMaxUploadRecords0 LogpushJobMaxUploadRecords = 0` - `int64` - `Name string` Optional human readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job. - `OutputOptions OutputOptions` The structured replacement for `logpull_options`. When including this field, the `logpull_option` field will be ignored. - `BatchPrefix string` String to be prepended before each batch. - `BatchSuffix string` String to be appended after each batch. - `Cve2021_44228 bool` If set to true, will cause all occurrences of `${` in the generated files to be replaced with `x{`. - `FieldDelimiter string` String to join fields. This field will be ignored when `record_template` is set. 
- `FieldNames []string` List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the fields names you are interested in. - `MergeSubrequests bool` If set to true, subrequests will be merged into the parent request. Only supported for the `http_requests` dataset. - `OutputType OutputOptionsOutputType` Specifies the output type, such as `ndjson` or `csv`. This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, are different between output types. - `const OutputOptionsOutputTypeNdjson OutputOptionsOutputType = "ndjson"` - `const OutputOptionsOutputTypeCsv OutputOptionsOutputType = "csv"` - `RecordDelimiter string` String to be inserted in-between the records as separator. - `RecordPrefix string` String to be prepended before each record. - `RecordSuffix string` String to be appended after each record. - `RecordTemplate string` String to use as template for each record instead of the default json key value mapping. All fields used in the template must be present in `field_names` as well, otherwise they will end up as null. Format as a Go `text/template` without any standard functions, like conditionals, loops, sub-templates, etc. - `SampleRate float64` Floating number to specify sampling rate. Sampling is applied on top of filtering, and regardless of the current `sample_interval` of the data. - `TimestampFormat OutputOptionsTimestampFormat` String to specify the format for timestamps, such as `unixnano`, `unix`, `rfc3339`, `rfc3339ms` or `rfc3339ns`. 
- `const OutputOptionsTimestampFormatUnixnano OutputOptionsTimestampFormat = "unixnano"` - `const OutputOptionsTimestampFormatUnix OutputOptionsTimestampFormat = "unix"` - `const OutputOptionsTimestampFormatRfc3339 OutputOptionsTimestampFormat = "rfc3339"` - `const OutputOptionsTimestampFormatRfc3339ms OutputOptionsTimestampFormat = "rfc3339ms"` - `const OutputOptionsTimestampFormatRfc3339ns OutputOptionsTimestampFormat = "rfc3339ns"` ### Example ```go package main import ( "context" "fmt" "github.com/cloudflare/cloudflare-go" "github.com/cloudflare/cloudflare-go/logpush" "github.com/cloudflare/cloudflare-go/option" ) func main() { client := cloudflare.NewClient( option.WithAPIToken("Sn3lZJTBX6kkg7OdcBUAxOO963GEIyGQqnFTOFYY"), ) logpushJob, err := client.Logpush.Jobs.Update( context.TODO(), int64(1), logpush.JobUpdateParams{ DestinationConf: cloudflare.F("s3://mybucket/logs?region=us-west-2"), Filter: cloudflare.F(`{"where":{"and":[{"key":"ClientRequestPath","operator":"contains","value":"/static"},{"key":"ClientRequestHost","operator":"eq","value":"example.com"}]}}`), MaxUploadBytes: cloudflare.F(logpush.JobUpdateParamsMaxUploadBytes(5000000)), MaxUploadIntervalSeconds: cloudflare.F(logpush.JobUpdateParamsMaxUploadIntervalSeconds(30)), MaxUploadRecords: cloudflare.F(logpush.JobUpdateParamsMaxUploadRecords(1000)), OutputOptions: cloudflare.F(logpush.OutputOptionsParam{ Cve2021_44228: cloudflare.F(false), BatchPrefix: cloudflare.F(""), BatchSuffix: cloudflare.F(""), FieldDelimiter: cloudflare.F(","), FieldNames: cloudflare.F([]string{"Datetime", "DstIP", "SrcIP"}), OutputType: cloudflare.F(logpush.OutputOptionsOutputTypeNdjson), RecordDelimiter: cloudflare.F(""), RecordPrefix: cloudflare.F("{"), RecordSuffix: cloudflare.F("}\n"), SampleRate: cloudflare.F(1.000000), TimestampFormat: cloudflare.F(logpush.OutputOptionsTimestampFormatUnixnano), }), OwnershipChallenge: cloudflare.F("00000000000000000000"), }, ) if err != nil { panic(err.Error()) } fmt.Printf("%+v\n", 
logpushJob.ID) } ``` #### Response ```json { "errors": [], "messages": [], "result": { "dataset": "gateway_dns", "destination_conf": "s3://mybucket/logs?region=us-west-2", "enabled": false, "error_message": null, "filter": "{\"where\":{\"and\":[{\"key\":\"ClientRequestPath\",\"operator\":\"contains\",\"value\":\"/static\"},{\"key\":\"ClientRequestHost\",\"operator\":\"eq\",\"value\":\"example.com\"}]}}", "id": 1, "kind": "", "last_complete": null, "last_error": null, "max_upload_bytes": 5000000, "max_upload_interval_seconds": 30, "max_upload_records": 1000, "name": "example.com", "output_options": { "CVE-2021-44228": false, "batch_prefix": "", "batch_suffix": "", "field_delimiter": ",", "field_names": [ "Datetime", "DstIP", "SrcIP" ], "output_type": "ndjson", "record_delimiter": "", "record_prefix": "{", "record_suffix": "}\n", "sample_rate": 1, "timestamp_format": "unixnano" } }, "success": true } ``` ## Delete Logpush job `client.Logpush.Jobs.Delete(ctx, jobID, body) (*JobDeleteResponse, error)` **delete** `/{accounts_or_zones}/{account_or_zone_id}/logpush/jobs/{job_id}` Deletes a Logpush job. ### Parameters - `jobID int64` Unique id of the job. - `body JobDeleteParams` - `AccountID param.Field[string]` The Account ID to use for this endpoint. Mutually exclusive with the Zone ID. - `ZoneID param.Field[string]` The Zone ID to use for this endpoint. Mutually exclusive with the Account ID. ### Returns - `type JobDeleteResponse struct{…}` - `ID int64` Unique id of the job. 
### Example ```go package main import ( "context" "fmt" "github.com/cloudflare/cloudflare-go" "github.com/cloudflare/cloudflare-go/logpush" "github.com/cloudflare/cloudflare-go/option" ) func main() { client := cloudflare.NewClient( option.WithAPIToken("Sn3lZJTBX6kkg7OdcBUAxOO963GEIyGQqnFTOFYY"), ) job, err := client.Logpush.Jobs.Delete( context.TODO(), int64(1), logpush.JobDeleteParams{ }, ) if err != nil { panic(err.Error()) } fmt.Printf("%+v\n", job.ID) } ``` #### Response ```json { "errors": [ { "code": 1000, "message": "message", "documentation_url": "documentation_url", "source": { "pointer": "pointer" } } ], "messages": [ { "code": 1000, "message": "message", "documentation_url": "documentation_url", "source": { "pointer": "pointer" } } ], "success": true, "result": { "id": 1 } } ``` ## Domain Types ### Logpush Job - `type LogpushJob struct{…}` - `ID int64` Unique id of the job. - `Dataset LogpushJobDataset` Name of the dataset. A list of supported datasets can be found on the [Developer Docs](https://developers.cloudflare.com/logs/reference/log-fields/). 
- `const LogpushJobDatasetAccessRequests LogpushJobDataset = "access_requests"` - `const LogpushJobDatasetAuditLogs LogpushJobDataset = "audit_logs"` - `const LogpushJobDatasetAuditLogsV2 LogpushJobDataset = "audit_logs_v2"` - `const LogpushJobDatasetBISOUserActions LogpushJobDataset = "biso_user_actions"` - `const LogpushJobDatasetCasbFindings LogpushJobDataset = "casb_findings"` - `const LogpushJobDatasetDevicePostureResults LogpushJobDataset = "device_posture_results"` - `const LogpushJobDatasetDEXApplicationTests LogpushJobDataset = "dex_application_tests"` - `const LogpushJobDatasetDEXDeviceStateEvents LogpushJobDataset = "dex_device_state_events"` - `const LogpushJobDatasetDLPForensicCopies LogpushJobDataset = "dlp_forensic_copies"` - `const LogpushJobDatasetDNSFirewallLogs LogpushJobDataset = "dns_firewall_logs"` - `const LogpushJobDatasetDNSLogs LogpushJobDataset = "dns_logs"` - `const LogpushJobDatasetEmailSecurityAlerts LogpushJobDataset = "email_security_alerts"` - `const LogpushJobDatasetFirewallEvents LogpushJobDataset = "firewall_events"` - `const LogpushJobDatasetGatewayDNS LogpushJobDataset = "gateway_dns"` - `const LogpushJobDatasetGatewayHTTP LogpushJobDataset = "gateway_http"` - `const LogpushJobDatasetGatewayNetwork LogpushJobDataset = "gateway_network"` - `const LogpushJobDatasetHTTPRequests LogpushJobDataset = "http_requests"` - `const LogpushJobDatasetIPSECLogs LogpushJobDataset = "ipsec_logs"` - `const LogpushJobDatasetMagicIDsDetections LogpushJobDataset = "magic_ids_detections"` - `const LogpushJobDatasetNELReports LogpushJobDataset = "nel_reports"` - `const LogpushJobDatasetNetworkAnalyticsLogs LogpushJobDataset = "network_analytics_logs"` - `const LogpushJobDatasetPageShieldEvents LogpushJobDataset = "page_shield_events"` - `const LogpushJobDatasetSinkholeHTTPLogs LogpushJobDataset = "sinkhole_http_logs"` - `const LogpushJobDatasetSpectrumEvents LogpushJobDataset = "spectrum_events"` - `const LogpushJobDatasetSSHLogs LogpushJobDataset = 
"ssh_logs"` - `const LogpushJobDatasetWARPConfigChanges LogpushJobDataset = "warp_config_changes"` - `const LogpushJobDatasetWARPToggleChanges LogpushJobDataset = "warp_toggle_changes"` - `const LogpushJobDatasetWorkersTraceEvents LogpushJobDataset = "workers_trace_events"` - `const LogpushJobDatasetZarazEvents LogpushJobDataset = "zaraz_events"` - `const LogpushJobDatasetZeroTrustNetworkSessions LogpushJobDataset = "zero_trust_network_sessions"` - `DestinationConf string` Uniquely identifies a resource (such as an s3 bucket) where data. will be pushed. Additional configuration parameters supported by the destination may be included. - `Enabled bool` Flag that indicates if the job is enabled. - `ErrorMessage string` If not null, the job is currently failing. Failures are usually. repetitive (example: no permissions to write to destination bucket). Only the last failure is recorded. On successful execution of a job the error_message and last_error are set to null. - `Frequency LogpushJobFrequency` This field is deprecated. Please use `max_upload_*` parameters instead. . The frequency at which Cloudflare sends batches of logs to your destination. Setting frequency to high sends your logs in larger quantities of smaller files. Setting frequency to low sends logs in smaller quantities of larger files. - `const LogpushJobFrequencyHigh LogpushJobFrequency = "high"` - `const LogpushJobFrequencyLow LogpushJobFrequency = "low"` - `Kind LogpushJobKind` The kind parameter (optional) is used to differentiate between Logpush and Edge Log Delivery jobs (when supported by the dataset). - `const LogpushJobKindEmpty LogpushJobKind = ""` - `const LogpushJobKindEdge LogpushJobKind = "edge"` - `LastComplete Time` Records the last time for which logs have been successfully pushed. If the last successful push was for logs range 2018-07-23T10:00:00Z to 2018-07-23T10:01:00Z then the value of this field will be 2018-07-23T10:01:00Z. 
If the job has never run or has just been enabled and hasn't run yet then the field will be empty. - `LastError Time` Records the last time the job failed. If not null, the job is currently. failing. If null, the job has either never failed or has run successfully at least once since last failure. See also the error_message field. - `LogpullOptions string` This field is deprecated. Use `output_options` instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately. - `MaxUploadBytes LogpushJobMaxUploadBytes` The maximum uncompressed file size of a batch of logs. This setting value must be between `5 MB` and `1 GB`, or `0` to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. - `type LogpushJobMaxUploadBytes int64` The maximum uncompressed file size of a batch of logs. This setting value must be between `5 MB` and `1 GB`, or `0` to disable it. Note that you cannot set a minimum file size; this means that log files may be much smaller than this batch size. - `const LogpushJobMaxUploadBytes0 LogpushJobMaxUploadBytes = 0` - `int64` - `MaxUploadIntervalSeconds LogpushJobMaxUploadIntervalSeconds` The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or `0` to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. - `type LogpushJobMaxUploadIntervalSeconds int64` The maximum interval in seconds for log batches. This setting must be between 30 and 300 seconds (5 minutes), or `0` to disable it. Note that you cannot specify a minimum interval for log batches; this means that log files may be sent in shorter intervals than this. 
- `const LogpushJobMaxUploadIntervalSeconds0 LogpushJobMaxUploadIntervalSeconds = 0` - `int64` - `MaxUploadRecords LogpushJobMaxUploadRecords` The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or `0` to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. - `type LogpushJobMaxUploadRecords int64` The maximum number of log lines per batch. This setting must be between 1000 and 1,000,000 lines, or `0` to disable it. Note that you cannot specify a minimum number of log lines per batch; this means that log files may contain many fewer lines than this. - `const LogpushJobMaxUploadRecords0 LogpushJobMaxUploadRecords = 0` - `int64` - `Name string` Optional human readable job name. Not unique. Cloudflare suggests that you set this to a meaningful string, like the domain name, to make it easier to identify your job. - `OutputOptions OutputOptions` The structured replacement for `logpull_options`. When including this field, the `logpull_option` field will be ignored. - `BatchPrefix string` String to be prepended before each batch. - `BatchSuffix string` String to be appended after each batch. - `Cve2021_44228 bool` If set to true, will cause all occurrences of `${` in the generated files to be replaced with `x{`. - `FieldDelimiter string` String to join fields. This field will be ignored when `record_template` is set. - `FieldNames []string` List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the field names you are interested in. - `MergeSubrequests bool` If set to true, subrequests will be merged into the parent request. Only supported for the `http_requests` dataset. - `OutputType OutputOptionsOutputType` Specifies the output type, such as `ndjson` or `csv`. 
This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, are different between output types. - `const OutputOptionsOutputTypeNdjson OutputOptionsOutputType = "ndjson"` - `const OutputOptionsOutputTypeCsv OutputOptionsOutputType = "csv"` - `RecordDelimiter string` String to be inserted in-between the records as separator. - `RecordPrefix string` String to be prepended before each record. - `RecordSuffix string` String to be appended after each record. - `RecordTemplate string` String to use as template for each record instead of the default json key value mapping. All fields used in the template must be present in `field_names` as well, otherwise they will end up as null. Format as a Go `text/template` without any standard functions, like conditionals, loops, sub-templates, etc. - `SampleRate float64` Floating number to specify sampling rate. Sampling is applied on top of filtering, and regardless of the current `sample_interval` of the data. - `TimestampFormat OutputOptionsTimestampFormat` String to specify the format for timestamps, such as `unixnano`, `unix`, `rfc3339`, `rfc3339ms` or `rfc3339ns`. - `const OutputOptionsTimestampFormatUnixnano OutputOptionsTimestampFormat = "unixnano"` - `const OutputOptionsTimestampFormatUnix OutputOptionsTimestampFormat = "unix"` - `const OutputOptionsTimestampFormatRfc3339 OutputOptionsTimestampFormat = "rfc3339"` - `const OutputOptionsTimestampFormatRfc3339ms OutputOptionsTimestampFormat = "rfc3339ms"` - `const OutputOptionsTimestampFormatRfc3339ns OutputOptionsTimestampFormat = "rfc3339ns"` ### Output Options - `type OutputOptions struct{…}` The structured replacement for `logpull_options`. When including this field, the `logpull_option` field will be ignored. - `BatchPrefix string` String to be prepended before each batch. - `BatchSuffix string` String to be appended after each batch. 
- `Cve2021_44228 bool` If set to true, will cause all occurrences of `${` in the generated files to be replaced with `x{`. - `FieldDelimiter string` String to join fields. This field will be ignored when `record_template` is set. - `FieldNames []string` List of field names to be included in the Logpush output. For the moment, there is no option to add all fields at once, so you must specify all the field names you are interested in. - `MergeSubrequests bool` If set to true, subrequests will be merged into the parent request. Only supported for the `http_requests` dataset. - `OutputType OutputOptionsOutputType` Specifies the output type, such as `ndjson` or `csv`. This sets default values for the rest of the settings, depending on the chosen output type. Some formatting rules, like string quoting, are different between output types. - `const OutputOptionsOutputTypeNdjson OutputOptionsOutputType = "ndjson"` - `const OutputOptionsOutputTypeCsv OutputOptionsOutputType = "csv"` - `RecordDelimiter string` String to be inserted in-between the records as separator. - `RecordPrefix string` String to be prepended before each record. - `RecordSuffix string` String to be appended after each record. - `RecordTemplate string` String to use as template for each record instead of the default json key value mapping. All fields used in the template must be present in `field_names` as well, otherwise they will end up as null. Format as a Go `text/template` without any standard functions, like conditionals, loops, sub-templates, etc. - `SampleRate float64` Floating number to specify sampling rate. Sampling is applied on top of filtering, and regardless of the current `sample_interval` of the data. - `TimestampFormat OutputOptionsTimestampFormat` String to specify the format for timestamps, such as `unixnano`, `unix`, `rfc3339`, `rfc3339ms` or `rfc3339ns`. 
- `const OutputOptionsTimestampFormatUnixnano OutputOptionsTimestampFormat = "unixnano"` - `const OutputOptionsTimestampFormatUnix OutputOptionsTimestampFormat = "unix"` - `const OutputOptionsTimestampFormatRfc3339 OutputOptionsTimestampFormat = "rfc3339"` - `const OutputOptionsTimestampFormatRfc3339ms OutputOptionsTimestampFormat = "rfc3339ms"` - `const OutputOptionsTimestampFormatRfc3339ns OutputOptionsTimestampFormat = "rfc3339ns"` # Ownership ## Get ownership challenge `client.Logpush.Ownership.New(ctx, params) (*OwnershipNewResponse, error)` **post** `/{accounts_or_zones}/{account_or_zone_id}/logpush/ownership` Gets a new ownership challenge sent to your destination. ### Parameters - `params OwnershipNewParams` - `DestinationConf param.Field[string]` Body param: Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included. - `AccountID param.Field[string]` Path param: The Account ID to use for this endpoint. Mutually exclusive with the Zone ID. - `ZoneID param.Field[string]` Path param: The Zone ID to use for this endpoint. Mutually exclusive with the Account ID. 
### Returns - `type OwnershipNewResponse struct{…}` - `Filename string` - `Message string` - `Valid bool` ### Example ```go package main import ( "context" "fmt" "github.com/cloudflare/cloudflare-go" "github.com/cloudflare/cloudflare-go/logpush" "github.com/cloudflare/cloudflare-go/option" ) func main() { client := cloudflare.NewClient( option.WithAPIToken("Sn3lZJTBX6kkg7OdcBUAxOO963GEIyGQqnFTOFYY"), ) ownership, err := client.Logpush.Ownership.New(context.TODO(), logpush.OwnershipNewParams{ DestinationConf: cloudflare.F("s3://mybucket/logs?region=us-west-2"), }) if err != nil { panic(err.Error()) } fmt.Printf("%+v\n", ownership.Valid) } ``` #### Response ```json { "errors": [ { "code": 1000, "message": "message", "documentation_url": "documentation_url", "source": { "pointer": "pointer" } } ], "messages": [ { "code": 1000, "message": "message", "documentation_url": "documentation_url", "source": { "pointer": "pointer" } } ], "success": true, "result": { "filename": "logs/challenge-filename.txt", "message": "", "valid": true } } ``` ## Validate ownership challenge `client.Logpush.Ownership.Validate(ctx, params) (*OwnershipValidation, error)` **post** `/{accounts_or_zones}/{account_or_zone_id}/logpush/ownership/validate` Validates ownership challenge of the destination. ### Parameters - `params OwnershipValidateParams` - `DestinationConf param.Field[string]` Body param: Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included. - `OwnershipChallenge param.Field[string]` Body param: Ownership challenge token to prove destination ownership. - `AccountID param.Field[string]` Path param: The Account ID to use for this endpoint. Mutually exclusive with the Zone ID. - `ZoneID param.Field[string]` Path param: The Zone ID to use for this endpoint. Mutually exclusive with the Account ID. 
### Returns - `type OwnershipValidation struct{…}` - `Valid bool` ### Example ```go package main import ( "context" "fmt" "github.com/cloudflare/cloudflare-go" "github.com/cloudflare/cloudflare-go/logpush" "github.com/cloudflare/cloudflare-go/option" ) func main() { client := cloudflare.NewClient( option.WithAPIToken("Sn3lZJTBX6kkg7OdcBUAxOO963GEIyGQqnFTOFYY"), ) ownershipValidation, err := client.Logpush.Ownership.Validate(context.TODO(), logpush.OwnershipValidateParams{ DestinationConf: cloudflare.F("s3://mybucket/logs?region=us-west-2"), OwnershipChallenge: cloudflare.F("00000000000000000000"), }) if err != nil { panic(err.Error()) } fmt.Printf("%+v\n", ownershipValidation.Valid) } ``` #### Response ```json { "errors": [ { "code": 1000, "message": "message", "documentation_url": "documentation_url", "source": { "pointer": "pointer" } } ], "messages": [ { "code": 1000, "message": "message", "documentation_url": "documentation_url", "source": { "pointer": "pointer" } } ], "success": true, "result": { "valid": true } } ``` ## Domain Types ### Ownership Validation - `type OwnershipValidation struct{…}` - `Valid bool` # Validate ## Validate destination `client.Logpush.Validate.Destination(ctx, params) (*ValidateDestinationResponse, error)` **post** `/{accounts_or_zones}/{account_or_zone_id}/logpush/validate/destination` Validates destination. ### Parameters - `params ValidateDestinationParams` - `DestinationConf param.Field[string]` Body param: Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included. - `AccountID param.Field[string]` Path param: The Account ID to use for this endpoint. Mutually exclusive with the Zone ID. - `ZoneID param.Field[string]` Path param: The Zone ID to use for this endpoint. Mutually exclusive with the Account ID. 
### Returns - `type ValidateDestinationResponse struct{…}` - `Message string` - `Valid bool` ### Example ```go package main import ( "context" "fmt" "github.com/cloudflare/cloudflare-go" "github.com/cloudflare/cloudflare-go/logpush" "github.com/cloudflare/cloudflare-go/option" ) func main() { client := cloudflare.NewClient( option.WithAPIToken("Sn3lZJTBX6kkg7OdcBUAxOO963GEIyGQqnFTOFYY"), ) response, err := client.Logpush.Validate.Destination(context.TODO(), logpush.ValidateDestinationParams{ DestinationConf: cloudflare.F("s3://mybucket/logs?region=us-west-2"), }) if err != nil { panic(err.Error()) } fmt.Printf("%+v\n", response.Valid) } ``` #### Response ```json { "errors": [ { "code": 1000, "message": "message", "documentation_url": "documentation_url", "source": { "pointer": "pointer" } } ], "messages": [ { "code": 1000, "message": "message", "documentation_url": "documentation_url", "source": { "pointer": "pointer" } } ], "success": true, "result": { "message": "", "valid": true } } ``` ## Check destination exists `client.Logpush.Validate.DestinationExists(ctx, params) (*ValidateDestinationExistsResponse, error)` **post** `/{accounts_or_zones}/{account_or_zone_id}/logpush/validate/destination/exists` Checks if there is an existing job with a destination. ### Parameters - `params ValidateDestinationExistsParams` - `DestinationConf param.Field[string]` Body param: Uniquely identifies a resource (such as an s3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included. - `AccountID param.Field[string]` Path param: The Account ID to use for this endpoint. Mutually exclusive with the Zone ID. - `ZoneID param.Field[string]` Path param: The Zone ID to use for this endpoint. Mutually exclusive with the Account ID. 
### Returns - `type ValidateDestinationExistsResponse struct{…}` - `Exists bool` ### Example ```go package main import ( "context" "fmt" "github.com/cloudflare/cloudflare-go" "github.com/cloudflare/cloudflare-go/logpush" "github.com/cloudflare/cloudflare-go/option" ) func main() { client := cloudflare.NewClient( option.WithAPIToken("Sn3lZJTBX6kkg7OdcBUAxOO963GEIyGQqnFTOFYY"), ) response, err := client.Logpush.Validate.DestinationExists(context.TODO(), logpush.ValidateDestinationExistsParams{ DestinationConf: cloudflare.F("s3://mybucket/logs?region=us-west-2"), }) if err != nil { panic(err.Error()) } fmt.Printf("%+v\n", response.Exists) } ``` #### Response ```json { "errors": [ { "code": 1000, "message": "message", "documentation_url": "documentation_url", "source": { "pointer": "pointer" } } ], "messages": [ { "code": 1000, "message": "message", "documentation_url": "documentation_url", "source": { "pointer": "pointer" } } ], "success": true, "result": { "exists": false } } ``` ## Validate origin `client.Logpush.Validate.Origin(ctx, params) (*ValidateOriginResponse, error)` **post** `/{accounts_or_zones}/{account_or_zone_id}/logpush/validate/origin` Validates logpull origin with logpull_options. ### Parameters - `params ValidateOriginParams` - `LogpullOptions param.Field[string]` Body param: This field is deprecated. Use `output_options` instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the logpull api, copy the url (full url or just the query string) of your call here, and logpush will keep on making this call for you, setting start and end times appropriately. - `AccountID param.Field[string]` Path param: The Account ID to use for this endpoint. Mutually exclusive with the Zone ID. - `ZoneID param.Field[string]` Path param: The Zone ID to use for this endpoint. Mutually exclusive with the Account ID. 
### Returns - `type ValidateOriginResponse struct{…}` - `Message string` - `Valid bool` ### Example ```go package main import ( "context" "fmt" "github.com/cloudflare/cloudflare-go" "github.com/cloudflare/cloudflare-go/logpush" "github.com/cloudflare/cloudflare-go/option" ) func main() { client := cloudflare.NewClient( option.WithAPIToken("Sn3lZJTBX6kkg7OdcBUAxOO963GEIyGQqnFTOFYY"), ) response, err := client.Logpush.Validate.Origin(context.TODO(), logpush.ValidateOriginParams{ LogpullOptions: cloudflare.F("fields=RayID,ClientIP,EdgeStartTimestamp&timestamps=rfc3339"), }) if err != nil { panic(err.Error()) } fmt.Printf("%+v\n", response.Valid) } ``` #### Response ```json { "errors": [ { "code": 1000, "message": "message", "documentation_url": "documentation_url", "source": { "pointer": "pointer" } } ], "messages": [ { "code": 1000, "message": "message", "documentation_url": "documentation_url", "source": { "pointer": "pointer" } } ], "success": true, "result": { "message": "", "valid": true } } ```