Commit 3e36dfa

angelampcostasdnts authored and committed
Adds APIRequest component to Logs destinations (cloudflare#23594)
1 parent 04639c4 commit 3e36dfa

9 files changed: +262 -215 lines changed

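Every file touched by this commit follows the same pattern: a hard-coded cURL block is removed and an `APIRequest` component call with the equivalent `path`, `method`, and `json` payload is added in its place. A minimal MDX sketch of that pattern, assuming only the props visible in the diffs below (the job fields shown are illustrative placeholders, not part of this commit):

```mdx
import { APIRequest } from "~/components"

{/* Renders a documented Logpush API call; path, method, and json
    carry the same values as the cURL example being replaced. */}
<APIRequest
  path="/zones/{zone_id}/logpush/jobs"
  method="POST"
  json={{
    name: "<DOMAIN_NAME>",
    dataset: "http_requests"
  }}
/>
```

One visible difference: the removed cURL examples carried explicit `X-Auth-Email`/`X-Auth-Key` headers, while the component call only declares the endpoint and body, leaving authentication to however the component renders the request.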

src/content/docs/logs/get-started/enable-destinations/datadog.mdx

Lines changed: 32 additions & 26 deletions
```diff
@@ -9,7 +9,7 @@ head:
 
 ---
 
-import { Render, TabItem, Tabs } from "~/components"
+import { Render, TabItem, Tabs, APIRequest } from "~/components"
 
 Cloudflare Logpush supports pushing logs directly to Datadog via the Cloudflare dashboard or via API.
 
@@ -100,21 +100,30 @@ To create a job, make a `POST` request to the Logpush jobs endpoint with the fol
 
 Example request using cURL:
 
-```bash
-curl https://api.cloudflare.com/client/v4/zones/{zone_id}/logpush/jobs \
---header "X-Auth-Email: <EMAIL>" \
---header "X-Auth-Key: <API_KEY>" \
---header "Content-Type: application/json" \
---data '{
-  "name": "<DOMAIN_NAME>",
-  "destination_conf": "datadog://<DATADOG_ENDPOINT_URL>?header_DD-API-KEY=<DATADOG_API_KEY>&ddsource=cloudflare&service=<SERVICE>&host=<HOST>&ddtags=<TAGS>",
-  "output_options": {
-    "field_names": ["ClientIP", "ClientRequestHost", "ClientRequestMethod", "ClientRequestURI", "EdgeEndTimestamp", "EdgeResponseBytes", "EdgeResponseStatus" ,"EdgeStartTimestamp", "RayID"],
-    "timestamp_format": "rfc3339"
-  },
-  "dataset": "http_requests"
-}'
-```
+<APIRequest
+  path="/zones/{zone_id}/logpush/jobs"
+  method="POST"
+  json={{
+    name: "<DOMAIN_NAME>",
+    destination_conf:
+      "datadog://<DATADOG_ENDPOINT_URL>?header_DD-API-KEY=<DATADOG_API_KEY>&ddsource=cloudflare&service=<SERVICE>&host=<HOST>&ddtags=<TAGS>",
+    output_options: {
+      field_names: [
+        "ClientIP",
+        "ClientRequestHost",
+        "ClientRequestMethod",
+        "ClientRequestURI",
+        "EdgeEndTimestamp",
+        "EdgeResponseBytes",
+        "EdgeResponseStatus",
+        "EdgeStartTimestamp",
+        "RayID"
+      ],
+      timestamp_format: "rfc3339"
+    },
+    dataset: "http_requests"
+  }}
+/>
 
 Response:
 
@@ -146,16 +155,13 @@ To enable a job, make a `PUT` request to the Logpush jobs endpoint. You will use
 
 Example request using cURL:
 
-```bash
-curl --request PUT \
-https://api.cloudflare.com/client/v4/zones/{zone_id}/logpush/jobs/{job_id} \
---header "X-Auth-Email: <EMAIL>" \
---header "X-Auth-Key: <API_KEY>" \
---header "Content-Type: application/json" \
---data '{
-  "enabled": true
-}'
-```
+<APIRequest
+  path="/zones/{zone_id}/logpush/jobs/{job_id}"
+  method="PUT"
+  json={{
+    enabled: true
+  }}
+/>
 
 Response:
 
```
src/content/docs/logs/get-started/enable-destinations/elastic.mdx

Lines changed: 15 additions & 15 deletions
```diff
@@ -9,6 +9,8 @@ head:
 
 ---
 
+import { APIRequest } from "~/components"
+
 Push your Cloudflare logs to Elastic for instant visibility and insights. Enabling this integration with Elastic comes with a predefined dashboard to view all of your Cloudflare observability and security data with ease.
 
 The Cloudflare Logpush integration can be used in three different modes to collect data:
@@ -39,21 +41,19 @@ Add the same custom header along with its value on both sides for additional sec
 
 For example, while creating a job along with a header and value for a particular dataset:
 
-```bash
-curl --location https://api.cloudflare.com/zones/{zone_id}/logpush/jobs \
---header "X-Auth-Email: <EMAIL>" \
---header "X-Auth-Key: <API_KEY>" \
---header "Content-Type: application/json" \
---data-raw '{
-  "name": "<PUBLIC_DOMAIN>",
-  "destination_conf": "https://<PUBLIC_DOMAIN>:<PUBLIC_PORT>?header_<SECRET_HEADER>=<SECRET_VALUE>",
-  "dataset": "http_requests",
-  "output_options": {
-    "field_names": ["RayID","EdgeStartTimestamp"],
-    "timestamp_format": "rfc3339"
-  }
-}'
-```
+<APIRequest
+  path="/zones/{zone_id}/logpush/jobs"
+  method="POST"
+  json={{
+    name: "<PUBLIC_DOMAIN>",
+    destination_conf: "https://<PUBLIC_DOMAIN>:<PUBLIC_PORT>?header_<SECRET_HEADER>=<SECRET_VALUE>",
+    dataset: "http_requests",
+    output_options: {
+      field_names: ["RayID", "EdgeStartTimestamp"],
+      timestamp_format: "rfc3339"
+    }
+  }}
+/>
 
 ## Enable the Integration in Elastic
 
```
src/content/docs/logs/get-started/enable-destinations/http.mdx

Lines changed: 18 additions & 19 deletions
```diff
@@ -6,7 +6,7 @@ sidebar:
 
 ---
 
-import { Render } from "~/components"
+import { Render, APIRequest } from "~/components"
 
 Cloudflare Logpush now supports the ability to send logs to configurable HTTP endpoints.
 
@@ -68,21 +68,20 @@ The `ownership_challenge` parameter is not required to create a Logpush job to a
 
 ## Example curl request
 
-```bash
-curl https://api.cloudflare.com/client/v4/zones/{zone_id}/logpush/jobs \
---header "X-Auth-Email: <EMAIL>" \
---header "X-Auth-Key: <API_KEY>" \
---header "Content-Type: application/json" \
---data '{
-  "name": "theburritobot.com-https",
-  "output_options": {
-    "field_names": ["EdgeStartTimestamp", "RayID"],
-    "timestamp_format": "rfc3339"
-  },
-  "destination_conf": "https://logs.example.com?header_Authorization=Basic%20REDACTED&tags=host:theburritobot.com,dataset:http_requests",
-  "max_upload_bytes": 5000000,
-  "max_upload_records": 1000,
-  "dataset": "http_requests",
-  "enabled": true
-}'
-```
+<APIRequest
+  path="/zones/{zone_id}/logpush/jobs"
+  method="POST"
+  json={{
+    name: "theburritobot.com-https",
+    output_options: {
+      field_names: ["EdgeStartTimestamp", "RayID"],
+      timestamp_format: "rfc3339"
+    },
+    destination_conf:
+      "https://logs.example.com?header_Authorization=Basic%20REDACTED&tags=host:theburritobot.com,dataset:http_requests",
+    max_upload_bytes: 5000000,
+    max_upload_records: 1000,
+    dataset: "http_requests",
+    enabled: true
+  }}
+/>
```
src/content/docs/logs/get-started/enable-destinations/ibm-cloud-logs.mdx

Lines changed: 29 additions & 32 deletions
```diff
@@ -9,6 +9,8 @@ head:
 
 ---
 
+import { APIRequest } from "~/components"
+
 Cloudflare Logpush supports pushing logs directly to IBM Cloud Logs via API. The dashboard functionality will later be added.
 
 ## Manage via API
@@ -43,28 +45,26 @@ To create a job, make a `POST` request to the Logpush jobs endpoint with the fol
 
 Example request using cURL:
 
-```bash
-curl https://api.cloudflare.com/client/v4/zones/{zone_id}/logpush/jobs \
---header "X-Auth-Email: <EMAIL>" \
---header "X-Auth-Key: <API_KEY>" \
---header "Content-Type: application/json" \
---data '{
-  "name": "<DOMAIN_NAME>",
-  "output_options": {
-    "output_type": "ndjson",
-    "timestamp_format": "rfc3339",
-    "batch_prefix": "[",
-    "batch_suffix": "]",
-    "record_prefix": "{\"applicationName\":\"ibm-platform-log\",\"subsystemName\":\"internet-svcs:logpush\",\"text\":{",
-    "record_suffix": "}}",
-    "record_delimiter": ","
-  },
-  "destination_conf": "ibmcl://<INSTANCE_ID>.ingress.<REGION>.logs.cloud.ibm.com/logs/v1/singles?ibm_api_key=<IBM_API_KEY>",
-  "max_upload_bytes": 2000000,
-  "dataset": "http_requests",
-  "enabled": true
-}'
-```
+<APIRequest
+  method="POST"
+  path="/zones/{zone_id}/logpush/jobs"
+  json={{
+    name: "<DOMAIN_NAME>",
+    output_options: {
+      output_type: "ndjson",
+      timestamp_format: "rfc3339",
+      batch_prefix: "[",
+      batch_suffix: "]",
+      record_prefix: "{\"applicationName\":\"ibm-platform-log\",\"subsystemName\":\"internet-svcs:logpush\",\"text\":{",
+      record_suffix: "}}",
+      record_delimiter: ","
+    },
+    destination_conf: "ibmcl://<INSTANCE_ID>.ingress.<REGION>.logs.cloud.ibm.com/logs/v1/singles?ibm_api_key=<IBM_API_KEY>",
+    max_upload_bytes: 2000000,
+    dataset: "http_requests",
+    enabled: true
+  }}
+/>
 
 Response:
 
@@ -103,16 +103,13 @@ To enable a job, make a `PUT` request to the Logpush jobs endpoint. You will use
 
 Example request using cURL:
 
-```bash
-curl --request PUT \
-https://api.cloudflare.com/client/v4/zones/{zone_id}/logpush/jobs/{job_id} \
---header "X-Auth-Email: <EMAIL>" \
---header "X-Auth-Key: <API_KEY>" \
---header "Content-Type: application/json" \
---data '{
-  "enabled": true
-}'
-```
+<APIRequest
+  method="PUT"
+  path="/zones/{zone_id}/logpush/jobs/{job_id}"
+  json={{
+    enabled: true
+  }}
+/>
 
 Response:
 
```
src/content/docs/logs/get-started/enable-destinations/ibm-qradar.mdx

Lines changed: 52 additions & 36 deletions
```diff
@@ -5,6 +5,8 @@ sidebar:
   order: 98
 ---
 
+import { APIRequest } from "~/components"
+
 To configure a QRadar/Cloudflare integration you have the option to use one of the following methods:
 
 - [HTTP Receiver protocol](/logs/get-started/enable-destinations/ibm-qradar/#http-receiver-protocol)
@@ -16,45 +18,59 @@ To send Cloudflare logs to QRadar you need to create a [Logpush job to HTTP endp
 
 ### Cloudflare Firewall events
 
-```bash
-curl https://api.cloudflare.com/client/v4/zones/{zone_id}/logpush/jobs \
---header "X-Auth-Email: <EMAIL>" \
---header "X-Auth-Key: <API_KEY>" \
---header "Content-Type: application/json" \
---data '{
-  "name": "<NAME>",
-  "output_options": {
-    "field_names": ["Action", "ClientIP", "ClientASN", "ClientASNDescription", "ClientCountry", "ClientIPClass","ClientRefererHost", "ClientRefererPath", "ClientRefererQuery", "ClientRefererScheme", "ClientRequestHost","ClientRequestMethod", "ClientRequestPath", "ClientRequestProtocol", "ClientRequestQuery", "ClientRequestScheme","ClientRequestUserAgent", "EdgeColoCode", "EdgeResponseStatus", "Kind", "MatchIndex", "Metadata","OriginResponseStatus", "OriginatorRayID", "RayID", "RuleID", "Source", "Datetime"],
-    "timestamp_format": "rfc3339"
-  },
-  "destination_conf": "<QRADAR_URL>:<LOG_SOURCE_PORT>",
-  "max_upload_bytes": 5000000,
-  "max_upload_records": 1000,
-  "dataset": "firewall_events",
-  "enabled": true
-}'
-```
+<APIRequest
+  method="POST"
+  path="/zones/{zone_id}/logpush/jobs"
+  json={{
+    name: "<NAME>",
+    output_options: {
+      field_names: [
+        "Action", "ClientIP", "ClientASN", "ClientASNDescription", "ClientCountry", "ClientIPClass",
+        "ClientRefererHost", "ClientRefererPath", "ClientRefererQuery", "ClientRefererScheme",
+        "ClientRequestHost", "ClientRequestMethod", "ClientRequestPath", "ClientRequestProtocol",
+        "ClientRequestQuery", "ClientRequestScheme", "ClientRequestUserAgent", "EdgeColoCode",
+        "EdgeResponseStatus", "Kind", "MatchIndex", "Metadata", "OriginResponseStatus",
+        "OriginatorRayID", "RayID", "RuleID", "Source", "Datetime"
+      ],
+      timestamp_format: "rfc3339"
+    },
+    destination_conf: "<QRADAR_URL>:<LOG_SOURCE_PORT>",
+    max_upload_bytes: 5000000,
+    max_upload_records: 1000,
+    dataset: "firewall_events",
+    enabled: true
+  }}
+/>
+
 
 ### Cloudflare HTTP events
 
-```bash
-curl https://api.cloudflare.com/client/v4/zones/{zone_id}/logpush/jobs \
---header "X-Auth-Email: <EMAIL>" \
---header "X-Auth-Key: <API_KEY>" \
---header "Content-Type: application/json" \
---data '{
-  "name": "<NAME>",
-  "output_options": {
-    "field_names": ["ClientRequestMethod", "EdgeResponseStatus", "ClientIP", "ClientSrcPort", "CacheCacheStatus","ClientCountry", "ClientDeviceType", "ClientIPClass", "ClientMTLSAuthCertFingerprint", "ClientMTLSAuthStatus","ClientRegionCode", "ClientRequestBytes", "ClientRequestHost", "ClientRequestPath", "ClientRequestProtocol","ClientRequestReferer", "ClientRequestScheme", "ClientRequestSource", "ClientRequestURI", "ClientRequestUserAgent","ClientSSLCipher", "ClientSSLProtocol", "ClientXRequestedWith", "EdgeEndTimestamp", "EdgeRequestHost","EdgeResponseBodyBytes", "EdgeResponseBytes", "EdgeServerIP", "EdgeStartTimestamp", "SecurityActions","SecurityRuleIDs", "SecuritySources", "OriginIP", "OriginResponseStatus", "OriginSSLProtocol", "ParentRayID", "RayID", "SecurityAction", "WAFAttackScore", "SecurityRuleID", "SecurityRuleDescription", "WAFSQLiAttackScore","WAFXSSAttackScore", "EdgeStartTimestamp"],
-    "timestamp_format": "rfc3339"
-  },
-  "destination_conf": "<QRADAR_URL>:<LOG_SOURCE_PORT>",
-  "max_upload_bytes": 5000000,
-  "max_upload_records": 1000,
-  "dataset": "http_requests",
-  "enabled": true
-}'
-```
+<APIRequest
+  method="POST"
+  path="/zones/{zone_id}/logpush/jobs"
+  json={{
+    name: "<NAME>",
+    output_options: {
+      field_names: [
+        "ClientRequestMethod", "EdgeResponseStatus", "ClientIP", "ClientSrcPort", "CacheCacheStatus",
+        "ClientCountry", "ClientDeviceType", "ClientIPClass", "ClientMTLSAuthCertFingerprint", "ClientMTLSAuthStatus",
+        "ClientRegionCode", "ClientRequestBytes", "ClientRequestHost", "ClientRequestPath", "ClientRequestProtocol",
+        "ClientRequestReferer", "ClientRequestScheme", "ClientRequestSource", "ClientRequestURI", "ClientRequestUserAgent",
+        "ClientSSLCipher", "ClientSSLProtocol", "ClientXRequestedWith", "EdgeEndTimestamp", "EdgeRequestHost",
+        "EdgeResponseBodyBytes", "EdgeResponseBytes", "EdgeServerIP", "EdgeStartTimestamp", "SecurityActions",
+        "SecurityRuleIDs", "SecuritySources", "OriginIP", "OriginResponseStatus", "OriginSSLProtocol", "ParentRayID",
+        "RayID", "SecurityAction", "WAFAttackScore", "SecurityRuleID", "SecurityRuleDescription", "WAFSQLiAttackScore",
+        "WAFXSSAttackScore", "EdgeStartTimestamp"
+      ],
+      timestamp_format: "rfc3339"
+    },
+    destination_conf: "<QRADAR_URL>:<LOG_SOURCE_PORT>",
+    max_upload_bytes: 5000000,
+    max_upload_records: 1000,
+    dataset: "http_requests",
+    enabled: true
+  }}
+/>
 
 Cloudflare checks the accessibility of the IP address, port, and validates the certificate of the HTTP Receive log source. If all parameters are valid, a Logpush is created, and starts to send events to HTTP Receiver log source.
 
```
