diff --git a/src/content/docs/logs/get-started/enable-destinations/datadog.mdx b/src/content/docs/logs/get-started/enable-destinations/datadog.mdx
index 6921d27c6e294f5..0f85a0e92725434 100644
--- a/src/content/docs/logs/get-started/enable-destinations/datadog.mdx
+++ b/src/content/docs/logs/get-started/enable-destinations/datadog.mdx
@@ -9,7 +9,7 @@ head:
 ---
 
-import { Render, TabItem, Tabs } from "~/components"
+import { Render, TabItem, Tabs, APIRequest } from "~/components"
 
 Cloudflare Logpush supports pushing logs directly to Datadog via the Cloudflare dashboard or via API.
 
@@ -100,21 +100,30 @@ To create a job, make a `POST` request to the Logpush jobs endpoint with the fol
 
 Example request using cURL:
 
-```bash
-curl https://api.cloudflare.com/client/v4/zones/{zone_id}/logpush/jobs \
---header "X-Auth-Email: <EMAIL>" \
---header "X-Auth-Key: <API_KEY>" \
---header "Content-Type: application/json" \
---data '{
-  "name": "<DOMAIN_NAME>",
-  "destination_conf": "datadog://<DATADOG_ENDPOINT_URL>?header_DD-API-KEY=<DATADOG_API_KEY>&ddsource=cloudflare&service=<SERVICE>&host=<HOST>&ddtags=<TAGS>",
-  "output_options": {
-    "field_names": ["ClientIP", "ClientRequestHost", "ClientRequestMethod", "ClientRequestURI", "EdgeEndTimestamp", "EdgeResponseBytes", "EdgeResponseStatus" ,"EdgeStartTimestamp", "RayID"],
-    "timestamp_format": "rfc3339"
-  },
-  "dataset": "http_requests"
-}'
-```
+<APIRequest
+  path="/zones/{zone_id}/logpush/jobs"
+  method="POST"
+  json={{
+    name: "<DOMAIN_NAME>",
+    destination_conf:
+      "datadog://<DATADOG_ENDPOINT_URL>?header_DD-API-KEY=<DATADOG_API_KEY>&ddsource=cloudflare&service=<SERVICE>&host=<HOST>&ddtags=<TAGS>",
+    output_options: {
+      field_names: [
+        "ClientIP",
+        "ClientRequestHost",
+        "ClientRequestMethod",
+        "ClientRequestURI",
+        "EdgeEndTimestamp",
+        "EdgeResponseBytes",
+        "EdgeResponseStatus",
+        "EdgeStartTimestamp",
+        "RayID"
+      ],
+      timestamp_format: "rfc3339"
+    },
+    dataset: "http_requests"
+  }}
+/>
 
 Response:
 
@@ -146,16 +155,13 @@ To enable a job, make a `PUT` request to the Logpush jobs endpoint. You will use
 
 Example request using cURL:
 
-```bash
-curl --request PUT \
-https://api.cloudflare.com/client/v4/zones/{zone_id}/logpush/jobs/{job_id} \
---header "X-Auth-Email: <EMAIL>" \
---header "X-Auth-Key: <API_KEY>" \
---header "Content-Type: application/json" \
---data '{
-  "enabled": true
-}'
-```
+<APIRequest
+  path="/zones/{zone_id}/logpush/jobs/{job_id}"
+  method="PUT"
+  json={{
+    enabled: true
+  }}
+/>
 
 Response:
 
diff --git a/src/content/docs/logs/get-started/enable-destinations/elastic.mdx b/src/content/docs/logs/get-started/enable-destinations/elastic.mdx
index beee6e683177b59..a96f8f68a4c2307 100644
--- a/src/content/docs/logs/get-started/enable-destinations/elastic.mdx
+++ b/src/content/docs/logs/get-started/enable-destinations/elastic.mdx
@@ -9,6 +9,8 @@ head:
 ---
 
+import { APIRequest } from "~/components"
+
 Push your Cloudflare logs to Elastic for instant visibility and insights. Enabling this integration with Elastic comes with a predefined dashboard to view all of your Cloudflare observability and security data with ease.
 The Cloudflare Logpush integration can be used in three different modes to collect data:
 
@@ -39,21 +41,19 @@ Add the same custom header along with its value on both sides for additional sec
 
 For example, while creating a job along with a header and value for a particular dataset:
 
-```bash
-curl --location https://api.cloudflare.com/zones/{zone_id}/logpush/jobs \
---header "X-Auth-Email: <EMAIL>" \
---header "X-Auth-Key: <API_KEY>" \
---header "Content-Type: application/json" \
---data-raw '{
-  "name": "<DOMAIN_NAME>",
-  "destination_conf": "https://<HOSTNAME>:<PORT>?header_<HEADER_NAME>=<HEADER_VALUE>",
-  "dataset": "http_requests",
-  "output_options": {
-    "field_names": ["RayID","EdgeStartTimestamp"],
-    "timestamp_format": "rfc3339"
-  }
-}'
-```
+<APIRequest
+  path="/zones/{zone_id}/logpush/jobs"
+  method="POST"
+  json={{
+    name: "<DOMAIN_NAME>",
+    destination_conf: "https://<HOSTNAME>:<PORT>?header_<HEADER_NAME>=<HEADER_VALUE>",
+    dataset: "http_requests",
+    output_options: {
+      field_names: ["RayID", "EdgeStartTimestamp"],
+      timestamp_format: "rfc3339"
+    }
+  }}
+/>
 
 ## Enable the Integration in Elastic
 
diff --git a/src/content/docs/logs/get-started/enable-destinations/http.mdx b/src/content/docs/logs/get-started/enable-destinations/http.mdx
index cc7adabedb2cf08..c956607e62e446f 100644
--- a/src/content/docs/logs/get-started/enable-destinations/http.mdx
+++ b/src/content/docs/logs/get-started/enable-destinations/http.mdx
@@ -6,7 +6,7 @@ sidebar:
 ---
 
-import { Render } from "~/components"
+import { Render, APIRequest } from "~/components"
 
 Cloudflare Logpush now supports the ability to send logs to configurable HTTP endpoints.
 
@@ -68,21 +68,20 @@ The `ownership_challenge` parameter is not required to create a Logpush job to a
 
 ## Example curl request
 
-```bash
-curl https://api.cloudflare.com/client/v4/zones/{zone_id}/logpush/jobs \
---header "X-Auth-Email: <EMAIL>" \
---header "X-Auth-Key: <API_KEY>" \
---header "Content-Type: application/json" \
---data '{
-  "name": "theburritobot.com-https",
-  "output_options": {
-    "field_names": ["EdgeStartTimestamp", "RayID"],
-    "timestamp_format": "rfc3339"
-  },
-  "destination_conf": "https://logs.example.com?header_Authorization=Basic%20REDACTED&tags=host:theburritobot.com,dataset:http_requests",
-  "max_upload_bytes": 5000000,
-  "max_upload_records": 1000,
-  "dataset": "http_requests",
-  "enabled": true
-}'
-```
+<APIRequest
+  path="/zones/{zone_id}/logpush/jobs"
+  method="POST"
+  json={{
+    name: "theburritobot.com-https",
+    output_options: {
+      field_names: ["EdgeStartTimestamp", "RayID"],
+      timestamp_format: "rfc3339"
+    },
+    destination_conf:
+      "https://logs.example.com?header_Authorization=Basic%20REDACTED&tags=host:theburritobot.com,dataset:http_requests",
+    max_upload_bytes: 5000000,
+    max_upload_records: 1000,
+    dataset: "http_requests",
+    enabled: true
+  }}
+/>
diff --git a/src/content/docs/logs/get-started/enable-destinations/ibm-cloud-logs.mdx b/src/content/docs/logs/get-started/enable-destinations/ibm-cloud-logs.mdx
index 29b6e86894acb29..bd059cb217d7df6 100644
--- a/src/content/docs/logs/get-started/enable-destinations/ibm-cloud-logs.mdx
+++ b/src/content/docs/logs/get-started/enable-destinations/ibm-cloud-logs.mdx
@@ -9,6 +9,8 @@ head:
 ---
 
+import { APIRequest } from "~/components"
+
 Cloudflare Logpush supports pushing logs directly to IBM Cloud Logs via API. The dashboard functionality will later be added.
 
 ## Manage via API
 
@@ -43,28 +45,26 @@ To create a job, make a `POST` request to the Logpush jobs endpoint with the fol
 
 Example request using cURL:
 
-```bash
-curl https://api.cloudflare.com/client/v4/zones/{zone_id}/logpush/jobs \
---header "X-Auth-Email: <EMAIL>" \
---header "X-Auth-Key: <API_KEY>" \
---header "Content-Type: application/json" \
---data '{
-  "name": "<DOMAIN_NAME>",
-  "output_options": {
-    "output_type": "ndjson",
-    "timestamp_format": "rfc3339",
-    "batch_prefix": "[",
-    "batch_suffix": "]",
-    "record_prefix": "{\"applicationName\":\"ibm-platform-log\",\"subsystemName\":\"internet-svcs:logpush\",\"text\":{",
-    "record_suffix": "}}",
-    "record_delimiter": ","
-  },
-  "destination_conf": "ibmcl://<INSTANCE_ID>.ingress.<REGION>.logs.cloud.ibm.com/logs/v1/singles?ibm_api_key=<IBM_API_KEY>",
-  "max_upload_bytes": 2000000,
-  "dataset": "http_requests",
-  "enabled": true
-}'
-```
+<APIRequest
+  path="/zones/{zone_id}/logpush/jobs"
+  method="POST"
+  json={{
+    name: "<DOMAIN_NAME>",
+    output_options: {
+      output_type: "ndjson",
+      timestamp_format: "rfc3339",
+      batch_prefix: "[",
+      batch_suffix: "]",
+      record_prefix: "{\"applicationName\":\"ibm-platform-log\",\"subsystemName\":\"internet-svcs:logpush\",\"text\":{",
+      record_suffix: "}}",
+      record_delimiter: ","
+    },
+    destination_conf: "ibmcl://<INSTANCE_ID>.ingress.<REGION>.logs.cloud.ibm.com/logs/v1/singles?ibm_api_key=<IBM_API_KEY>",
+    max_upload_bytes: 2000000,
+    dataset: "http_requests",
+    enabled: true
+  }}
+/>
 
 Response:
 
@@ -103,16 +103,13 @@ To enable a job, make a `PUT` request to the Logpush jobs endpoint. You will use
 
 Example request using cURL:
 
-```bash
-curl --request PUT \
-https://api.cloudflare.com/client/v4/zones/{zone_id}/logpush/jobs/{job_id} \
---header "X-Auth-Email: <EMAIL>" \
---header "X-Auth-Key: <API_KEY>" \
---header "Content-Type: application/json" \
---data '{
-  "enabled": true
-}'
-```
+<APIRequest
+  path="/zones/{zone_id}/logpush/jobs/{job_id}"
+  method="PUT"
+  json={{
+    enabled: true
+  }}
+/>
 
 Response:
 
diff --git a/src/content/docs/logs/get-started/enable-destinations/ibm-qradar.mdx b/src/content/docs/logs/get-started/enable-destinations/ibm-qradar.mdx
index 4b68b3fec645ae1..d3367c43159e71c 100644
--- a/src/content/docs/logs/get-started/enable-destinations/ibm-qradar.mdx
+++ b/src/content/docs/logs/get-started/enable-destinations/ibm-qradar.mdx
@@ -5,6 +5,8 @@ sidebar:
   order: 98
 ---
 
+import { APIRequest } from "~/components"
+
 To configure a QRadar/Cloudflare integration you have the option to use one of the following methods:
 
 - [HTTP Receiver protocol](/logs/get-started/enable-destinations/ibm-qradar/#http-receiver-protocol)
@@ -16,45 +18,59 @@ To send Cloudflare logs to QRadar you need to create a [Logpush job to HTTP endp
 
 ### Cloudflare Firewall events
 
-```bash
-curl https://api.cloudflare.com/client/v4/zones/{zone_id}/logpush/jobs \
---header "X-Auth-Email: <EMAIL>" \
---header "X-Auth-Key: <API_KEY>" \
---header "Content-Type: application/json" \
---data '{
-  "name": "<DOMAIN_NAME>",
-  "output_options": {
-    "field_names": ["Action", "ClientIP", "ClientASN", "ClientASNDescription", "ClientCountry", "ClientIPClass","ClientRefererHost", "ClientRefererPath", "ClientRefererQuery", "ClientRefererScheme", "ClientRequestHost","ClientRequestMethod", "ClientRequestPath", "ClientRequestProtocol", "ClientRequestQuery", "ClientRequestScheme","ClientRequestUserAgent", "EdgeColoCode", "EdgeResponseStatus", "Kind", "MatchIndex", "Metadata","OriginResponseStatus", "OriginatorRayID", "RayID", "RuleID", "Source", "Datetime"],
-    "timestamp_format": "rfc3339"
-  },
-  "destination_conf": "<DESTINATION_IP>:<PORT>",
-  "max_upload_bytes": 5000000,
-  "max_upload_records": 1000,
-  "dataset": "firewall_events",
-  "enabled": true
-}'
-```
+<APIRequest
+  path="/zones/{zone_id}/logpush/jobs"
+  method="POST"
+  json={{
+    name: "<DOMAIN_NAME>",
+    output_options: {
+      field_names: [
+        "Action", "ClientIP", "ClientASN", "ClientASNDescription", "ClientCountry", "ClientIPClass",
+        "ClientRefererHost", "ClientRefererPath", "ClientRefererQuery", "ClientRefererScheme",
+        "ClientRequestHost", "ClientRequestMethod", "ClientRequestPath", "ClientRequestProtocol",
+        "ClientRequestQuery", "ClientRequestScheme", "ClientRequestUserAgent", "EdgeColoCode",
+        "EdgeResponseStatus", "Kind", "MatchIndex", "Metadata", "OriginResponseStatus",
+        "OriginatorRayID", "RayID", "RuleID", "Source", "Datetime"
+      ],
+      timestamp_format: "rfc3339"
+    },
+    destination_conf: "<DESTINATION_IP>:<PORT>",
+    max_upload_bytes: 5000000,
+    max_upload_records: 1000,
+    dataset: "firewall_events",
+    enabled: true
+  }}
+/>
+
 
 ### Cloudflare HTTP events
 
-```bash
-curl https://api.cloudflare.com/client/v4/zones/{zone_id}/logpush/jobs \
---header "X-Auth-Email: <EMAIL>" \
---header "X-Auth-Key: <API_KEY>" \
---header "Content-Type: application/json" \
---data '{
-  "name": "<DOMAIN_NAME>",
-  "output_options": {
-    "field_names": ["ClientRequestMethod", "EdgeResponseStatus", "ClientIP", "ClientSrcPort", "CacheCacheStatus","ClientCountry", "ClientDeviceType", "ClientIPClass", "ClientMTLSAuthCertFingerprint", "ClientMTLSAuthStatus","ClientRegionCode", "ClientRequestBytes", "ClientRequestHost", "ClientRequestPath", "ClientRequestProtocol","ClientRequestReferer", "ClientRequestScheme", "ClientRequestSource", "ClientRequestURI", "ClientRequestUserAgent","ClientSSLCipher", "ClientSSLProtocol", "ClientXRequestedWith", "EdgeEndTimestamp", "EdgeRequestHost","EdgeResponseBodyBytes", "EdgeResponseBytes", "EdgeServerIP", "EdgeStartTimestamp", "SecurityActions","SecurityRuleIDs", "SecuritySources", "OriginIP", "OriginResponseStatus", "OriginSSLProtocol", "ParentRayID", "RayID", "SecurityAction", "WAFAttackScore", "SecurityRuleID", "SecurityRuleDescription", "WAFSQLiAttackScore","WAFXSSAttackScore", "EdgeStartTimestamp"],
-    "timestamp_format": "rfc3339"
-  },
-  "destination_conf": "<DESTINATION_IP>:<PORT>",
-  "max_upload_bytes": 5000000,
-  "max_upload_records": 1000,
-  "dataset": "http_requests",
-  "enabled": true
-}'
-```
+<APIRequest
+  path="/zones/{zone_id}/logpush/jobs"
+  method="POST"
+  json={{
+    name: "<DOMAIN_NAME>",
+    output_options: {
+      field_names: [
+        "ClientRequestMethod", "EdgeResponseStatus", "ClientIP", "ClientSrcPort", "CacheCacheStatus",
+        "ClientCountry", "ClientDeviceType", "ClientIPClass", "ClientMTLSAuthCertFingerprint", "ClientMTLSAuthStatus",
+        "ClientRegionCode", "ClientRequestBytes", "ClientRequestHost", "ClientRequestPath", "ClientRequestProtocol",
+        "ClientRequestReferer", "ClientRequestScheme", "ClientRequestSource", "ClientRequestURI", "ClientRequestUserAgent",
+        "ClientSSLCipher", "ClientSSLProtocol", "ClientXRequestedWith", "EdgeEndTimestamp", "EdgeRequestHost",
+        "EdgeResponseBodyBytes", "EdgeResponseBytes", "EdgeServerIP", "EdgeStartTimestamp", "SecurityActions",
+        "SecurityRuleIDs", "SecuritySources", "OriginIP", "OriginResponseStatus", "OriginSSLProtocol", "ParentRayID",
+        "RayID", "SecurityAction", "WAFAttackScore", "SecurityRuleID", "SecurityRuleDescription", "WAFSQLiAttackScore",
+        "WAFXSSAttackScore", "EdgeStartTimestamp"
+      ],
+      timestamp_format: "rfc3339"
+    },
+    destination_conf: "<DESTINATION_IP>:<PORT>",
+    max_upload_bytes: 5000000,
+    max_upload_records: 1000,
+    dataset: "http_requests",
+    enabled: true
+  }}
+/>
 
 Cloudflare checks the accessibility of the IP address, port, and validates the certificate of the HTTP Receive log source. If all parameters are valid, a Logpush is created, and starts to send events to HTTP Receiver log source.
diff --git a/src/content/docs/logs/get-started/enable-destinations/new-relic.mdx b/src/content/docs/logs/get-started/enable-destinations/new-relic.mdx
index 991b992a21cb6bd..0029b8f132e719f 100644
--- a/src/content/docs/logs/get-started/enable-destinations/new-relic.mdx
+++ b/src/content/docs/logs/get-started/enable-destinations/new-relic.mdx
@@ -9,7 +9,7 @@ head:
 ---
 
-import { Render, TabItem, Tabs } from "~/components"
+import { Render, TabItem, Tabs, APIRequest } from "~/components"
 
 Cloudflare Logpush supports pushing logs directly to New Relic via the Cloudflare dashboard or via API.
 
@@ -85,23 +85,31 @@ To create a job, make a `POST` request to the Logpush jobs endpoint with the fol
 
 Example request using cURL:
 
-```bash
-curl https://api.cloudflare.com/client/v4/zones/{zone_id}/logpush/jobs \
---header "X-Auth-Email: <EMAIL>" \
---header "X-Auth-Key: <API_KEY>" \
---header "Content-Type: application/json" \
---data '{
-  "name": "<DOMAIN_NAME>",
-  "output_options": {
-    "field_names": ["ClientIP", "ClientRequestHost", "ClientRequestMethod", "ClientRequestURI", "EdgeEndTimestamp","EdgeResponseBytes", "EdgeResponseStatus", "EdgeStartTimestamp", "RayID"],
-    "timestamp_format": "unix"
-  },
-  "destination_conf": "https://log-api.newrelic.com/log/v1?Api-Key=<NEW_RELIC_LICENSE_KEY>&format=cloudflare",
-  "max_upload_bytes": 5000000,
-  "dataset": "http_requests",
-  "enabled": true
-}'
-```
+<APIRequest
+  path="/zones/{zone_id}/logpush/jobs"
+  method="POST"
+  json={{
+    name: "<DOMAIN_NAME>",
+    output_options: {
+      field_names: [
+        "ClientIP",
+        "ClientRequestHost",
+        "ClientRequestMethod",
+        "ClientRequestURI",
+        "EdgeEndTimestamp",
+        "EdgeResponseBytes",
+        "EdgeResponseStatus",
+        "EdgeStartTimestamp",
+        "RayID"
+      ],
+      timestamp_format: "unix"
+    },
+    destination_conf: "https://log-api.newrelic.com/log/v1?Api-Key=<NEW_RELIC_LICENSE_KEY>&format=cloudflare",
+    max_upload_bytes: 5000000,
+    dataset: "http_requests",
+    enabled: true
+  }}
+/>
 
 Response:
 
@@ -135,16 +143,13 @@ To enable a job, make a `PUT` request to the Logpush jobs endpoint. You will use
 
 Example request using cURL:
 
-```bash
-curl --request PUT \
-https://api.cloudflare.com/client/v4/zones/{zone_id}/logpush/jobs/{job_id} \
---header "X-Auth-Email: <EMAIL>" \
---header "X-Auth-Key: <API_KEY>" \
---header "Content-Type: application/json" \
---data '{
-  "enabled": true
-}'
-```
+<APIRequest
+  path="/zones/{zone_id}/logpush/jobs/{job_id}"
+  method="PUT"
+  json={{
+    enabled: true
+  }}
+/>
 
 Response:
 
diff --git a/src/content/docs/logs/get-started/enable-destinations/r2.mdx b/src/content/docs/logs/get-started/enable-destinations/r2.mdx
index a1b4995c69508d5..21f74fe98864433 100644
--- a/src/content/docs/logs/get-started/enable-destinations/r2.mdx
+++ b/src/content/docs/logs/get-started/enable-destinations/r2.mdx
@@ -6,7 +6,7 @@ sidebar:
 ---
 
-import { Render } from "~/components"
+import { Render, APIRequest } from "~/components"
 
 Cloudflare Logpush supports pushing logs directly to R2. You can do so via the automatic setup (Cloudflare creates an R2 bucket for you), or you can create your own R2 bucket with the custom setup. The automatic setup is ideal for quickly setting up a bucket or for testing purposes. Instead, use the custom setup if you need full control over the configuration.
@@ -103,22 +103,31 @@ r2://<BUCKET_PATH>/{DATE}?account-id=<ACCOUNT_ID>&access-key-id=<R2_ACCESS_KEY_ID>
 
 Example request using cURL:
 
-```bash
-curl https://api.cloudflare.com/client/v4/zones/{zone_id}/logpush/jobs \
---header "X-Auth-Email: <EMAIL>" \
---header "X-Auth-Key: <API_KEY>" \
---header "Content-Type: application/json" \
---data '{
-  "name": "<DOMAIN_NAME>",
-  "output_options": {
-    "field_names": ["ClientIP", "ClientRequestHost", "ClientRequestMethod", "ClientRequestURI", "EdgeEndTimestamp","EdgeResponseBytes", "EdgeResponseStatus", "EdgeStartTimestamp", "RayID"],
-    "timestamp_format": "rfc3339"
-  },
-  "destination_conf": "r2://<BUCKET_PATH>/{DATE}?account-id=<ACCOUNT_ID>&access-key-id=<R2_ACCESS_KEY_ID>&secret-access-key=<R2_SECRET_ACCESS_KEY>",
-  "dataset": "http_requests",
-  "enabled": true
-}'
-```
+<APIRequest
+  path="/zones/{zone_id}/logpush/jobs"
+  method="POST"
+  json={{
+    name: "<DOMAIN_NAME>",
+    output_options: {
+      field_names: [
+        "ClientIP",
+        "ClientRequestHost",
+        "ClientRequestMethod",
+        "ClientRequestURI",
+        "EdgeEndTimestamp",
+        "EdgeResponseBytes",
+        "EdgeResponseStatus",
+        "EdgeStartTimestamp",
+        "RayID"
+      ],
+      timestamp_format: "rfc3339"
+    },
+    destination_conf:
+      "r2://<BUCKET_PATH>/{DATE}?account-id=<ACCOUNT_ID>&access-key-id=<R2_ACCESS_KEY_ID>&secret-access-key=<R2_SECRET_ACCESS_KEY>",
+    dataset: "http_requests",
+    enabled: true
+  }}
+/>
 
 ## Download logs from R2
 
diff --git a/src/content/docs/logs/get-started/enable-destinations/s3-compatible-endpoints.mdx b/src/content/docs/logs/get-started/enable-destinations/s3-compatible-endpoints.mdx
index b95db3156601927..96bb6327499cadd 100644
--- a/src/content/docs/logs/get-started/enable-destinations/s3-compatible-endpoints.mdx
+++ b/src/content/docs/logs/get-started/enable-destinations/s3-compatible-endpoints.mdx
@@ -9,7 +9,7 @@ head:
 ---
 
-import { Render } from "~/components"
+import { Render, APIRequest } from "~/components"
 
 Cloudflare Logpush supports pushing logs to S3-compatible destinations via the Cloudflare dashboard or via API, including:
 
@@ -96,21 +96,31 @@ To create a job, make a `POST` request to the Logpush jobs endpoint with the fol
 
 Example request using cURL:
 
-```bash
-curl https://api.cloudflare.com/client/v4/zones/{zone_id}/logpush/jobs \
---header "X-Auth-Email: <EMAIL>" \
---header "X-Auth-Key: <API_KEY>" \
---header "Content-Type: application/json" \
---data '{
-  "name": "<DOMAIN_NAME>",
-  "destination_conf": "s3://<BUCKET_NAME>/<BUCKET_PATH>?region=<REGION>&access-key-id=<ACCESS_KEY_ID>&secret-access-key=<SECRET_ACCESS_KEY>&endpoint=<ENDPOINT_URL>",
-  "output_options": {
-    "field_names": ["ClientIP", "ClientIP", "ClientRequestHost", "ClientRequestMethod", "ClientRequestURI","EdgeEndTimestamp", "EdgeResponseBytes", "EdgeResponseStatus", "EdgeStartTimestamp", "RayID"],
-    "timestamp_format": "rfc3339"
-  },
-  "dataset": "http_requests"
-}'
-```
+<APIRequest
+  path="/zones/{zone_id}/logpush/jobs"
+  method="POST"
+  json={{
+    name: "<DOMAIN_NAME>",
+    destination_conf:
+      "s3://<BUCKET_NAME>/<BUCKET_PATH>?region=<REGION>&access-key-id=<ACCESS_KEY_ID>&secret-access-key=<SECRET_ACCESS_KEY>&endpoint=<ENDPOINT_URL>",
+    output_options: {
+      field_names: [
+        "ClientIP",
+        "ClientIP",
+        "ClientRequestHost",
+        "ClientRequestMethod",
+        "ClientRequestURI",
+        "EdgeEndTimestamp",
+        "EdgeResponseBytes",
+        "EdgeResponseStatus",
+        "EdgeStartTimestamp",
+        "RayID"
+      ],
+      timestamp_format: "rfc3339"
+    },
+    dataset: "http_requests"
+  }}
+/>
 
 Response:
 
diff --git a/src/content/docs/logs/get-started/enable-destinations/splunk.mdx b/src/content/docs/logs/get-started/enable-destinations/splunk.mdx
index f1b880fe07d5ca9..bcbe8f31276164d 100644
--- a/src/content/docs/logs/get-started/enable-destinations/splunk.mdx
+++ b/src/content/docs/logs/get-started/enable-destinations/splunk.mdx
@@ -8,7 +8,7 @@ head:
   content: Enable Logpush to Splunk
 ---
 
-import { Render } from "~/components";
+import { Render, APIRequest } from "~/components";
 
 The [HTTP Event Collector (HEC)](https://dev.splunk.com/enterprise/docs/devtools/httpeventcollector/) is a reliable method to receive data from Splunk Enterprise or Splunk Cloud Platform. Cloudflare Logpush supports pushing logs directly to Splunk HEC via the Cloudflare dashboard or API.
 
@@ -89,21 +89,29 @@ Cloudflare highly recommends setting this value to fals
 
 Example request using cURL:
 
-```bash
-curl https://api.cloudflare.com/client/v4/zones/{zone_id}/logpush/jobs \
---header "X-Auth-Email: <EMAIL>" \
---header "X-Auth-Key: <API_KEY>" \
---header "Content-Type: application/json" \
---data '{
-  "name": "<DOMAIN_NAME>",
-  "destination_conf": "splunk://<SPLUNK_ENDPOINT_URL>?channel=<SPLUNK_CHANNEL_ID>&insecure-skip-verify=<INSECURE_SKIP_VERIFY>&sourcetype=<SOURCE_TYPE>&header_Authorization=<SPLUNK_AUTH_TOKEN>",
-  "output_options": {
-    "field_names": ["ClientIP", "ClientRequestHost", "ClientRequestMethod", "ClientRequestURI", "EdgeEndTimestamp","EdgeResponseBytes", "EdgeResponseStatus", "EdgeStartTimestamp", "RayID"],
-    "timestamp_format": "rfc3339"
-  },
-  "dataset": "http_requests"
-}'
-```
+<APIRequest
+  path="/zones/{zone_id}/logpush/jobs"
+  method="POST"
+  json={{
+    name: "<DOMAIN_NAME>",
+    destination_conf: "splunk://<SPLUNK_ENDPOINT_URL>?channel=<SPLUNK_CHANNEL_ID>&insecure-skip-verify=<INSECURE_SKIP_VERIFY>&sourcetype=<SOURCE_TYPE>&header_Authorization=<SPLUNK_AUTH_TOKEN>",
+    output_options: {
+      field_names: [
+        "ClientIP",
+        "ClientRequestHost",
+        "ClientRequestMethod",
+        "ClientRequestURI",
+        "EdgeEndTimestamp",
+        "EdgeResponseBytes",
+        "EdgeResponseStatus",
+        "EdgeStartTimestamp",
+        "RayID"
+      ],
+      timestamp_format: "rfc3339"
+    },
+    dataset: "http_requests"
+  }}
+/>
 
 Response:
 
@@ -135,16 +143,13 @@ To enable a job, make a `PUT` request to the Logpush jobs endpoint. Use the job
 
 Example request using cURL:
 
-```bash
-curl --request PUT \
-https://api.cloudflare.com/client/v4/zones/{zone_id}/logpush/jobs/{job_id} \
---header "X-Auth-Email: <EMAIL>" \
---header "X-Auth-Key: <API_KEY>" \
---header "Content-Type: application/json" \
---data '{
-  "enabled": true
-}'
-```
+<APIRequest
+  path="/zones/{zone_id}/logpush/jobs/{job_id}"
+  method="PUT"
+  json={{
+    enabled: true
+  }}
+/>
 
 Response:
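
Note: every hunk above replaces a legacy `curl` example (authenticated with `X-Auth-Email`/`X-Auth-Key`) with the docs' `<APIRequest>` component, which presents the same job-creation call to readers. As a sanity check on what readers will actually run, here is a minimal sketch of the equivalent request using a Cloudflare API token instead of the legacy key pair; the `$ZONE_ID` and `$CLOUDFLARE_API_TOKEN` variables, the `<DESTINATION>` placeholder, and the chosen field list are illustrative and not part of this change.

```bash
# Create a Logpush job with token-based auth (illustrative; substitute your
# own zone ID, API token, and destination_conf for a supported destination).
curl "https://api.cloudflare.com/client/v4/zones/$ZONE_ID/logpush/jobs" \
  --header "Authorization: Bearer $CLOUDFLARE_API_TOKEN" \
  --header "Content-Type: application/json" \
  --data '{
    "name": "<DOMAIN_NAME>",
    "destination_conf": "<DESTINATION>",
    "output_options": {
      "field_names": ["RayID", "EdgeStartTimestamp"],
      "timestamp_format": "rfc3339"
    },
    "dataset": "http_requests",
    "enabled": true
  }'
```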