From d5e635249e0b729eff94ed1005a27b03afe2d944 Mon Sep 17 00:00:00 2001 From: Angela Costa Date: Tue, 22 Jul 2025 10:52:37 +0100 Subject: [PATCH] Moves filters --- public/__redirects | 3 ++- .../cloudflare-for-saas/hostname-analytics.mdx | 2 +- src/content/docs/cloudflare-one/insights/logs/logpush.mdx | 2 +- .../data-loss-prevention/dlp-policies/logging-options.mdx | 2 +- src/content/docs/dns/internal-dns/analytics.mdx | 2 +- src/content/docs/logs/get-started/api-configuration.mdx | 2 +- src/content/docs/logs/instant-logs.mdx | 2 +- .../docs/logs/logpush/examples/example-logpush-curl.mdx | 2 +- .../logs/logpush/logpush-job/enable-destinations/aws-s3.mdx | 2 +- .../logs/logpush/logpush-job/enable-destinations/azure.mdx | 2 +- .../logs/logpush/logpush-job/enable-destinations/datadog.mdx | 2 +- .../logpush-job/enable-destinations/google-cloud-storage.mdx | 2 +- .../logs/logpush/logpush-job/enable-destinations/http.mdx | 2 +- .../logpush/logpush-job/enable-destinations/new-relic.mdx | 2 +- .../docs/logs/logpush/logpush-job/enable-destinations/r2.mdx | 2 +- .../enable-destinations/s3-compatible-endpoints.mdx | 2 +- .../logs/logpush/logpush-job/enable-destinations/splunk.mdx | 2 +- .../logpush/logpush-job/enable-destinations/sumo-logic.mdx | 2 +- .../docs/logs/{reference => logpush/logpush-job}/filters.mdx | 4 ++-- src/content/docs/magic-firewall/how-to/filter-views.mdx | 2 +- src/content/docs/workers/observability/logs/logpush.mdx | 2 +- 21 files changed, 23 insertions(+), 22 deletions(-) rename src/content/docs/logs/{reference => logpush/logpush-job}/filters.mdx (98%) diff --git a/public/__redirects b/public/__redirects index 95e5821ba21d232..2fa87cdabe697a9 100644 --- a/public/__redirects +++ b/public/__redirects @@ -965,7 +965,7 @@ /logs/logpush/logpush-dashboard/ /logs/logpush/logpush-job/enable-destinations/ 301 /logs/logpush/s3-compatible-endpoints/ /logs/logpush/logpush-job/enable-destinations/s3-compatible-endpoints/ 301 /logs/reference/logpush-api-configuration/ 
/logs/get-started/api-configuration/ 301 -/logs/reference/logpush-api-configuration/filters/ /logs/reference/filters/ 301 +/logs/reference/logpush-api-configuration/filters/ /logs/logpush/logpush-job/filters/ 301 # Non-slashed version is being used in the Cloudflare dashboard /logs/reference/logpush-api-configuration/examples/example-logpush-curl/ /logs/logpush/examples/example-logpush-curl/ 301 /logs/log-explorer/ /log-explorer/log-search/ 301 @@ -978,6 +978,7 @@ /logs/tutorials/examples/example-logpush-python/ /logs/logpush/examples/example-logpush-python/ 301 /logs/get-started/alerts-and-analytics/ /logs/logpush/alerts-and-analytics/ 301 /logs/edge-log-delivery/ /logs/logpush/logpush-job/edge-log-delivery/ 301 +/logs/reference/filters/ /logs/logpush/logpush-job/filters/ 301 # magic-firewall /magic-firewall/reference/examples/ /magic-firewall/how-to/add-rules/ 301 diff --git a/src/content/docs/cloudflare-for-platforms/cloudflare-for-saas/hostname-analytics.mdx b/src/content/docs/cloudflare-for-platforms/cloudflare-for-saas/hostname-analytics.mdx index d12ed5bc01bbc6e..148869937bd7198 100644 --- a/src/content/docs/cloudflare-for-platforms/cloudflare-for-saas/hostname-analytics.mdx +++ b/src/content/docs/cloudflare-for-platforms/cloudflare-for-saas/hostname-analytics.mdx @@ -46,7 +46,7 @@ Build custom dashboards to share this information by specifying an individual cu [Logpush](/logs/logpush/) sends metadata from Cloudflare products to your cloud storage destination or SIEM. -Using [filters](/logs/reference/filters/), you can send set sample rates (or not include logs altogether) based on filter criteria. This flexibility allows you to maintain selective logs for custom hostnames without massively increasing your log volume. +Using [filters](/logs/logpush/logpush-job/filters/), you can set sample rates (or not include logs altogether) based on filter criteria. 
This flexibility allows you to maintain selective logs for custom hostnames without massively increasing your log volume. Filtering is available for [all Cloudflare datasets](/logs/reference/log-fields/zone/). diff --git a/src/content/docs/cloudflare-one/insights/logs/logpush.mdx b/src/content/docs/cloudflare-one/insights/logs/logpush.mdx index f68fd85035c7a0f..f1341b581011be7 100644 --- a/src/content/docs/cloudflare-one/insights/logs/logpush.mdx +++ b/src/content/docs/cloudflare-one/insights/logs/logpush.mdx @@ -25,7 +25,7 @@ To configure Logpush for Zero Trust logs: 4. Choose a [Logpush destination](/logs/logpush/logpush-job/enable-destinations/). 5. Follow the service-specific instructions to configure and validate your destination. 6. Choose the [Zero Trust datasets](#zero-trust-datasets) to export. -7. Enter a **Job name**, any [filters](/logs/reference/filters/) you would like to add, and the data fields you want to include in the logs. +7. Enter a **Job name**, any [filters](/logs/logpush/logpush-job/filters/) you would like to add, and the data fields you want to include in the logs. 8. (Optional) In **Advanced settings**, choose the timestamp format you prefer and whether you want to enable log sampling. 9. Select **Submit**. diff --git a/src/content/docs/cloudflare-one/policies/data-loss-prevention/dlp-policies/logging-options.mdx b/src/content/docs/cloudflare-one/policies/data-loss-prevention/dlp-policies/logging-options.mdx index 8845381178da605..51566afc7145904 100644 --- a/src/content/docs/cloudflare-one/policies/data-loss-prevention/dlp-policies/logging-options.mdx +++ b/src/content/docs/cloudflare-one/policies/data-loss-prevention/dlp-policies/logging-options.mdx @@ -92,4 +92,4 @@ To set up the DLP Forensic Copy Logpush job: DLP will now send a copy of HTTP requests that match this policy to your Logpush destination. -Logpush supports up to four DLP Forensic Copy Logpush jobs per account. 
By default, Gateway will send all matched HTTP requests to your configured DLP Forensic Copy jobs. To send specific policy matches to specific jobs, configure [Log filters](/logs/reference/filters/). If the request contains an archive file, DLP will only send up to 100 MB of uncompressed content to your configured storage. +Logpush supports up to four DLP Forensic Copy Logpush jobs per account. By default, Gateway will send all matched HTTP requests to your configured DLP Forensic Copy jobs. To send specific policy matches to specific jobs, configure [Log filters](/logs/logpush/logpush-job/filters/). If the request contains an archive file, DLP will only send up to 100 MB of uncompressed content to your configured storage. diff --git a/src/content/docs/dns/internal-dns/analytics.mdx b/src/content/docs/dns/internal-dns/analytics.mdx index a9860e61e5875c6..004a957715fe44e 100644 --- a/src/content/docs/dns/internal-dns/analytics.mdx +++ b/src/content/docs/dns/internal-dns/analytics.mdx @@ -22,4 +22,4 @@ The [fields](/analytics/graphql-api/getting-started/querying-basics/) added to c Leverage Logpush jobs for [Gateway DNS](/logs/reference/log-fields/account/gateway_dns/#internaldnsfallbackstrategy). For help setting up Logpush, refer to [Get started with Logs](/logs/get-started/). -You can also set up [Logpush filters](/logs/reference/filters/) to only push logs related to a specific [internal zone](/dns/internal-dns/internal-zones/) or [view](/dns/internal-dns/dns-views/) ID. \ No newline at end of file +You can also set up [Logpush filters](/logs/logpush/logpush-job/filters/) to only push logs related to a specific [internal zone](/dns/internal-dns/internal-zones/) or [view](/dns/internal-dns/dns-views/) ID. 
\ No newline at end of file diff --git a/src/content/docs/logs/get-started/api-configuration.mdx b/src/content/docs/logs/get-started/api-configuration.mdx index 84b4c34b0320ba4..679f7943c784e05 100644 --- a/src/content/docs/logs/get-started/api-configuration.mdx +++ b/src/content/docs/logs/get-started/api-configuration.mdx @@ -246,7 +246,7 @@ Response ## Filter -Use filters to select the events to include and/or remove from your logs. For more information, refer to [Filters](/logs/reference/filters/). +Use filters to select the events to include and/or remove from your logs. For more information, refer to [Filters](/logs/logpush/logpush-job/filters/). ## Sampling rate diff --git a/src/content/docs/logs/instant-logs.mdx b/src/content/docs/logs/instant-logs.mdx index 7f335633a951db9..b818e48e016b62e 100644 --- a/src/content/docs/logs/instant-logs.mdx +++ b/src/content/docs/logs/instant-logs.mdx @@ -45,7 +45,7 @@ Instant Logs has a maximum data rate supported. For high volume domains, we samp - **Filters** - Use filters to drill down into specific events. Filters consist of three parts: key, operator and value. -All supported operators can be found in the [Filters](/logs/reference/filters/) page. +All supported operators can be found in the [Filters](/logs/logpush/logpush-job/filters/) page. Below we have three examples of filters: diff --git a/src/content/docs/logs/logpush/examples/example-logpush-curl.mdx b/src/content/docs/logs/logpush/examples/example-logpush-curl.mdx index 2bc1d1976cb5095..6b17293d68c0f06 100644 --- a/src/content/docs/logs/logpush/examples/example-logpush-curl.mdx +++ b/src/content/docs/logs/logpush/examples/example-logpush-curl.mdx @@ -102,7 +102,7 @@ When using Sumo Logic, you may find it helpful to have [Live Tail](https://help. 
* Automated timestamp parsing within Sumo Logic; refer to [timestamps from Sumo Logic](https://help.sumologic.com/03Send-Data/Sources/04Reference-Information-for-Sources/Timestamps%2C-Time-Zones%2C-Time-Ranges%2C-and-Date-Formats) for details. * **ownership\_challenge** - Challenge token required to prove destination ownership. * **kind** (optional) - Used to differentiate between Logpush and Edge Log Delivery jobs. Refer to [Kind](/logs/get-started/api-configuration/#kind) for details. -* **filter** (optional) - Refer to [Filters](/logs/reference/filters/) for details. +* **filter** (optional) - Refer to [Filters](/logs/logpush/logpush-job/filters/) for details. ### Response diff --git a/src/content/docs/logs/logpush/logpush-job/enable-destinations/aws-s3.mdx b/src/content/docs/logs/logpush/logpush-job/enable-destinations/aws-s3.mdx index 2933b7d606a0cbd..96153829a5e6906 100644 --- a/src/content/docs/logs/logpush/logpush-job/enable-destinations/aws-s3.mdx +++ b/src/content/docs/logs/logpush/logpush-job/enable-destinations/aws-s3.mdx @@ -36,7 +36,7 @@ When you are done entering the destination details, select **Continue**. 9. In the next step, you need to configure your logpush job: * Enter the **Job name**. - * Under **If logs match**, you can select the events to include and/or remove from your logs. Refer to [Filters](/logs/reference/filters/) for more information. Not all datasets have this option available. + * Under **If logs match**, you can select the events to include and/or remove from your logs. Refer to [Filters](/logs/logpush/logpush-job/filters/) for more information. Not all datasets have this option available. * In **Send the following fields**, you can choose to either push all logs to your storage destination or selectively choose which logs you want to push. 10. 
In **Advanced Options**, you can: diff --git a/src/content/docs/logs/logpush/logpush-job/enable-destinations/azure.mdx b/src/content/docs/logs/logpush/logpush-job/enable-destinations/azure.mdx index 003c03d5cf6fb2e..2f6a6a873ca561e 100644 --- a/src/content/docs/logs/logpush/logpush-job/enable-destinations/azure.mdx +++ b/src/content/docs/logs/logpush/logpush-job/enable-destinations/azure.mdx @@ -30,7 +30,7 @@ When you are done entering the destination details, select **Continue**. 8. In the next step, you need to configure your logpush job: * Enter the **Job name**. - * Under **If logs match**, you can select the events to include and/or remove from your logs. Refer to [Filters](/logs/reference/filters/) for more information. Not all datasets have this option available. + * Under **If logs match**, you can select the events to include and/or remove from your logs. Refer to [Filters](/logs/logpush/logpush-job/filters/) for more information. Not all datasets have this option available. * In **Send the following fields**, you can choose to either push all logs to your storage destination or selectively choose which logs you want to push. 9. In **Advanced Options**, you can: diff --git a/src/content/docs/logs/logpush/logpush-job/enable-destinations/datadog.mdx b/src/content/docs/logs/logpush/logpush-job/enable-destinations/datadog.mdx index 0f85a0e92725434..b575f7100e2eb09 100644 --- a/src/content/docs/logs/logpush/logpush-job/enable-destinations/datadog.mdx +++ b/src/content/docs/logs/logpush/logpush-job/enable-destinations/datadog.mdx @@ -43,7 +43,7 @@ When you are done entering the destination details, select **Continue**. 8. In the next step, you need to configure your logpush job: * Enter the **Job name**. - * Under **If logs match**, you can select the events to include and/or remove from your logs. Refer to [Filters](/logs/reference/filters/) for more information. Not all datasets have this option available. 
+ * Under **If logs match**, you can select the events to include and/or remove from your logs. Refer to [Filters](/logs/logpush/logpush-job/filters/) for more information. Not all datasets have this option available. * In **Send the following fields**, you can choose to either push all logs to your storage destination or selectively choose which logs you want to push. 9. In **Advanced Options**, you can: diff --git a/src/content/docs/logs/logpush/logpush-job/enable-destinations/google-cloud-storage.mdx b/src/content/docs/logs/logpush/logpush-job/enable-destinations/google-cloud-storage.mdx index 0006ff6e3f674e7..1f9e0e3b6dd9698 100644 --- a/src/content/docs/logs/logpush/logpush-job/enable-destinations/google-cloud-storage.mdx +++ b/src/content/docs/logs/logpush/logpush-job/enable-destinations/google-cloud-storage.mdx @@ -33,7 +33,7 @@ When you are done entering the destination details, select **Continue**. 9. In the next step, you need to configure your logpush job: * Enter the **Job name**. - * Under **If logs match**, you can select the events to include and/or remove from your logs. Refer to [Filters](/logs/reference/filters/) for more information. Not all datasets have this option available. + * Under **If logs match**, you can select the events to include and/or remove from your logs. Refer to [Filters](/logs/logpush/logpush-job/filters/) for more information. Not all datasets have this option available. * In **Send the following fields**, you can choose to either push all logs to your storage destination or selectively choose which logs you want to push. 10. 
In **Advanced Options**, you can: diff --git a/src/content/docs/logs/logpush/logpush-job/enable-destinations/http.mdx b/src/content/docs/logs/logpush/logpush-job/enable-destinations/http.mdx index c956607e62e446f..482887a0c9044fb 100644 --- a/src/content/docs/logs/logpush/logpush-job/enable-destinations/http.mdx +++ b/src/content/docs/logs/logpush/logpush-job/enable-destinations/http.mdx @@ -29,7 +29,7 @@ Cloudflare expects that the endpoint is available over HTTPS, using a trusted ce 8. In the next step, you need to configure your logpush job: * Enter the **Job name**. - * Under **If logs match**, you can select the events to include and/or remove from your logs. Refer to [Filters](/logs/reference/filters/) for more information. Not all datasets have this option available. + * Under **If logs match**, you can select the events to include and/or remove from your logs. Refer to [Filters](/logs/logpush/logpush-job/filters/) for more information. Not all datasets have this option available. * In **Send the following fields**, you can choose to either push all logs to your storage destination or selectively choose which logs you want to push. 9. In **Advanced Options**, you can: diff --git a/src/content/docs/logs/logpush/logpush-job/enable-destinations/new-relic.mdx b/src/content/docs/logs/logpush/logpush-job/enable-destinations/new-relic.mdx index 0029b8f132e719f..33ba5dd8e1af592 100644 --- a/src/content/docs/logs/logpush/logpush-job/enable-destinations/new-relic.mdx +++ b/src/content/docs/logs/logpush/logpush-job/enable-destinations/new-relic.mdx @@ -39,7 +39,7 @@ When you are done entering the destination details, select **Continue**. 8. In the next step, you need to configure your logpush job: * Enter the **Job name**. - * Under **If logs match**, you can select the events to include and/or remove from your logs. Refer to [Filters](/logs/reference/filters/) for more information. Not all datasets have this option available. 
+ * Under **If logs match**, you can select the events to include and/or remove from your logs. Refer to [Filters](/logs/logpush/logpush-job/filters/) for more information. Not all datasets have this option available. * In **Send the following fields**, you can choose to either push all logs to your storage destination or selectively choose which logs you want to push. 9. In **Advanced Options**, you can: diff --git a/src/content/docs/logs/logpush/logpush-job/enable-destinations/r2.mdx b/src/content/docs/logs/logpush/logpush-job/enable-destinations/r2.mdx index 21f74fe98864433..6f04cf0fa3e1480 100644 --- a/src/content/docs/logs/logpush/logpush-job/enable-destinations/r2.mdx +++ b/src/content/docs/logs/logpush/logpush-job/enable-destinations/r2.mdx @@ -72,7 +72,7 @@ When you are done entering the destination details, select **Continue**. 8. In the next step, you need to configure your logpush job: * Enter the **Job name**. - * Under **If logs match**, you can select the events to include and/or remove from your logs. Refer to [Filters](/logs/reference/filters/) for more information. Not all datasets have this option available. + * Under **If logs match**, you can select the events to include and/or remove from your logs. Refer to [Filters](/logs/logpush/logpush-job/filters/) for more information. Not all datasets have this option available. * In **Send the following fields**, you can choose to either push all logs to your storage destination or selectively choose which logs you want to push. 9. 
In **Advanced Options**, you can: diff --git a/src/content/docs/logs/logpush/logpush-job/enable-destinations/s3-compatible-endpoints.mdx b/src/content/docs/logs/logpush/logpush-job/enable-destinations/s3-compatible-endpoints.mdx index 96bb6327499cadd..ef3e780fb40fe97 100644 --- a/src/content/docs/logs/logpush/logpush-job/enable-destinations/s3-compatible-endpoints.mdx +++ b/src/content/docs/logs/logpush/logpush-job/enable-destinations/s3-compatible-endpoints.mdx @@ -45,7 +45,7 @@ When you are done entering the destination details, select **Continue**. 8. In the next step, you need to configure your logpush job: * Enter the **Job name**. - * Under **If logs match**, you can select the events to include and/or remove from your logs. Refer to [Filters](/logs/reference/filters/) for more information. Not all datasets have this option available. + * Under **If logs match**, you can select the events to include and/or remove from your logs. Refer to [Filters](/logs/logpush/logpush-job/filters/) for more information. Not all datasets have this option available. * In **Send the following fields**, you can choose to either push all logs to your storage destination or selectively choose which logs you want to push. 9. In **Advanced Options**, you can: diff --git a/src/content/docs/logs/logpush/logpush-job/enable-destinations/splunk.mdx b/src/content/docs/logs/logpush/logpush-job/enable-destinations/splunk.mdx index bcbe8f31276164d..85af81af70f92e1 100644 --- a/src/content/docs/logs/logpush/logpush-job/enable-destinations/splunk.mdx +++ b/src/content/docs/logs/logpush/logpush-job/enable-destinations/splunk.mdx @@ -32,7 +32,7 @@ When you are done entering the destination details, select **Continue**. 8. In the next step, you need to configure your logpush job: - Enter the **Job name**. - - Under **If logs match**, you can select the events to include and/or remove from your logs. Refer to [Filters](/logs/reference/filters/) for more information. 
Not all datasets have this option available. + - Under **If logs match**, you can select the events to include and/or remove from your logs. Refer to [Filters](/logs/logpush/logpush-job/filters/) for more information. Not all datasets have this option available. - In **Send the following fields**, you can choose to either push all logs to your storage destination or selectively choose which logs you want to push. 9. In **Advanced Options**, you can: diff --git a/src/content/docs/logs/logpush/logpush-job/enable-destinations/sumo-logic.mdx b/src/content/docs/logs/logpush/logpush-job/enable-destinations/sumo-logic.mdx index fcd43306cd86e8e..d78a1da57c3f291 100644 --- a/src/content/docs/logs/logpush/logpush-job/enable-destinations/sumo-logic.mdx +++ b/src/content/docs/logs/logpush/logpush-job/enable-destinations/sumo-logic.mdx @@ -25,7 +25,7 @@ Cloudflare Logpush supports pushing logs directly to Sumo Logic via the Cloudfla 8. In the next step, you need to configure your logpush job: * Enter the **Job name**. - * Under **If logs match**, you can select the events to include and/or remove from your logs. Refer to [Filters](/logs/reference/filters/) for more information. Not all datasets have this option available. + * Under **If logs match**, you can select the events to include and/or remove from your logs. Refer to [Filters](/logs/logpush/logpush-job/filters/) for more information. Not all datasets have this option available. * In **Send the following fields**, you can choose to either push all logs to your storage destination or selectively choose which logs you want to push. 9. 
In **Advanced Options**, you can: diff --git a/src/content/docs/logs/reference/filters.mdx b/src/content/docs/logs/logpush/logpush-job/filters.mdx similarity index 98% rename from src/content/docs/logs/reference/filters.mdx rename to src/content/docs/logs/logpush/logpush-job/filters.mdx index 18cbbd22431ebe3..7674411d628b187 100644 --- a/src/content/docs/logs/reference/filters.mdx +++ b/src/content/docs/logs/logpush/logpush-job/filters.mdx @@ -3,7 +3,7 @@ pcx_content_type: how-to type: overview title: Filters sidebar: - order: 40 + order: 5 --- @@ -95,6 +95,6 @@ To set filters through the dashboard: 3. Select **Add Logpush job**. A modal window will open. 4. Select the dataset you want to push to a storage service. 5. Below **Select data fields**, in the **Filter** section, you can set up your filters. -6. You need to select a [Field](/logs/reference/log-fields/), an [Operator](/logs/reference/filters/#logical-operators), and a **Value**. +6. You need to select a [Field](/logs/reference/log-fields/), an [Operator](/logs/logpush/logpush-job/filters/#logical-operators), and a **Value**. 7. You can connect more filters using `AND` and `OR` logical operators. 8. Select **Next** to continue the setting up of your Logpush job. diff --git a/src/content/docs/magic-firewall/how-to/filter-views.mdx b/src/content/docs/magic-firewall/how-to/filter-views.mdx index 265e6e0e7bd937e..7e3b0481e6aa85c 100644 --- a/src/content/docs/magic-firewall/how-to/filter-views.mdx +++ b/src/content/docs/magic-firewall/how-to/filter-views.mdx @@ -4,7 +4,7 @@ pcx_content_type: how-to --- -You can utilize different [Log filters](/logs/reference/filters/) to only view specific data from Magic Firewall. +You can utilize different [Log filters](/logs/logpush/logpush-job/filters/) to only view specific data from Magic Firewall. 
## Filter by enabled or disabled rules diff --git a/src/content/docs/workers/observability/logs/logpush.mdx b/src/content/docs/workers/observability/logs/logpush.mdx index 07465abeb1bea8a..ae31c115096b360 100644 --- a/src/content/docs/workers/observability/logs/logpush.mdx +++ b/src/content/docs/workers/observability/logs/logpush.mdx @@ -69,7 +69,7 @@ curl "https://api.cloudflare.com/client/v4/accounts//logpush/jobs" \ }' | jq . ``` -In Logpush, you can configure [filters](/logs/reference/filters/) and a [sampling rate](/logs/get-started/api-configuration/#sampling-rate) to have more control of the volume of data that is sent to your configured destination. For example, if you only want to receive logs for requests that did not result in an exception, add the following `filter` JSON property below `output_options`: +In Logpush, you can configure [filters](/logs/logpush/logpush-job/filters/) and a [sampling rate](/logs/get-started/api-configuration/#sampling-rate) to have more control of the volume of data that is sent to your configured destination. For example, if you only want to receive logs for requests that did not result in an exception, add the following `filter` JSON property below `output_options`: `"filter":"{\"where\": {\"key\":\"Outcome\",\"operator\":\"!eq\",\"value\":\"exception\"}}"`