Create Logpush job
Creates a new Logpush job for an account or zone.
Security
API Token
The preferred authorization scheme for interacting with the Cloudflare API. Create a token.
API Email + API Key
The previous authorization scheme for interacting with the Cloudflare API, used in conjunction with a Global API key.
The previous authorization scheme for interacting with the Cloudflare API. When possible, use API tokens instead of Global API keys.
Accepted Permissions (at least one required)
Parameters
Uniquely identifies a resource (such as an S3 bucket) where data will be pushed. Additional configuration parameters supported by the destination may be included.
The Account ID to use for this endpoint. Mutually exclusive with the Zone ID.
The filters to select the events to include and/or remove from your logs. For more information, refer to Filters.
This field is deprecated. Use output_options instead. Configuration string. It specifies things like requested fields and timestamp formats. If migrating from the Logpull API, copy the URL (full URL or just the query string) of your call here, and Logpush will keep on making this call for you, setting start and end times appropriately.
Create Logpush job
import os

from cloudflare import Cloudflare

# The token is read from the environment; passing it explicitly is the
# default behavior and could be omitted.
client = Cloudflare(
    api_token=os.environ.get("CLOUDFLARE_API_TOKEN"),
)

# Create an account-scoped Logpush job that ships the gateway_dns dataset
# to an S3 bucket, emitting NDJSON records with a selected field set.
logpush_job = client.logpush.jobs.create(
    destination_conf="s3://mybucket/logs?region=us-west-2",
    account_id="account_id",
    dataset="gateway_dns",
    enabled=False,
    # Server-side event filter, supplied as a JSON-encoded string.
    filter='{"where":{"and":[{"key":"ClientRequestPath","operator":"contains","value":"/static"},{"key":"ClientRequestHost","operator":"eq","value":"example.com"}]}}',
    kind="",
    max_upload_bytes=5000000,
    max_upload_interval_seconds=30,
    max_upload_records=1000,
    name="example.com",
    output_options={
        "cve_2021_44228": False,
        "batch_prefix": "",
        "batch_suffix": "",
        "field_delimiter": ",",
        "field_names": ["Datetime", "DstIP", "SrcIP"],
        "output_type": "ndjson",
        "record_delimiter": "",
        "record_prefix": "{",
        "record_suffix": "}\n",
        "sample_rate": 1,
        "timestamp_format": "unixnano",
    },
    # Proves ownership of the destination; obtained from the
    # ownership-challenge endpoint beforehand.
    ownership_challenge="00000000000000000000",
)
print(logpush_job.id)

{
"errors": [],
"messages": [],
"result": {
"dataset": "gateway_dns",
"destination_conf": "s3://mybucket/logs?region=us-west-2",
"enabled": false,
"error_message": null,
"filter": "{\"where\":{\"and\":[{\"key\":\"ClientRequestPath\",\"operator\":\"contains\",\"value\":\"/static\"},{\"key\":\"ClientRequestHost\",\"operator\":\"eq\",\"value\":\"example.com\"}]}}",
"id": 1,
"kind": "",
"last_complete": null,
"last_error": null,
"max_upload_bytes": 5000000,
"max_upload_interval_seconds": 30,
"max_upload_records": 1000,
"name": "example.com",
"output_options": {
"CVE-2021-44228": false,
"batch_prefix": "",
"batch_suffix": "",
"field_delimiter": ",",
"field_names": [
"Datetime",
"DstIP",
"SrcIP"
],
"output_type": "ndjson",
"record_delimiter": "",
"record_prefix": "{",
"record_suffix": "}\n",
"sample_rate": 1,
"timestamp_format": "unixnano"
}
},
"success": true
}

Returns Examples
{
"errors": [],
"messages": [],
"result": {
"dataset": "gateway_dns",
"destination_conf": "s3://mybucket/logs?region=us-west-2",
"enabled": false,
"error_message": null,
"filter": "{\"where\":{\"and\":[{\"key\":\"ClientRequestPath\",\"operator\":\"contains\",\"value\":\"/static\"},{\"key\":\"ClientRequestHost\",\"operator\":\"eq\",\"value\":\"example.com\"}]}}",
"id": 1,
"kind": "",
"last_complete": null,
"last_error": null,
"max_upload_bytes": 5000000,
"max_upload_interval_seconds": 30,
"max_upload_records": 1000,
"name": "example.com",
"output_options": {
"CVE-2021-44228": false,
"batch_prefix": "",
"batch_suffix": "",
"field_delimiter": ",",
"field_names": [
"Datetime",
"DstIP",
"SrcIP"
],
"output_type": "ndjson",
"record_delimiter": "",
"record_prefix": "{",
"record_suffix": "}\n",
"sample_rate": 1,
"timestamp_format": "unixnano"
}
},
"success": true
}