diff --git a/public/__redirects b/public/__redirects
index 765ac0a71a3d472..d8b678d8f7edb12 100644
--- a/public/__redirects
+++ b/public/__redirects
@@ -138,6 +138,10 @@
# ai
/ai/ /use-cases/ai/ 301
+# ai-audit
+/ai-audit/features/detect-ai-crawlers/ /ai-audit/features/analyze-ai-crawlers/ 301
+/ai-audit/features/enforce-robots-txt/ /ai-audit/features/analyze-ai-crawlers/ 301
+
# AI Gateway
/ai-gateway/get-started/configuring-settings/ /ai-gateway/get-started/ 301
/ai-gateway/get-started/connecting-applications/ /ai-gateway/get-started/ 301
diff --git a/src/assets/images/changelog/ai-audit/analyze-metrics.png b/src/assets/images/changelog/ai-audit/analyze-metrics.png
new file mode 100644
index 000000000000000..20c4f01a8fc033d
Binary files /dev/null and b/src/assets/images/changelog/ai-audit/analyze-metrics.png differ
diff --git a/src/assets/images/changelog/ai-audit/manage-ai-crawlers.png b/src/assets/images/changelog/ai-audit/manage-ai-crawlers.png
new file mode 100644
index 000000000000000..37cac16ee3e3304
Binary files /dev/null and b/src/assets/images/changelog/ai-audit/manage-ai-crawlers.png differ
diff --git a/src/content/changelog/ai-audit/2024-09-23-ai-audit-launch.mdx b/src/content/changelog/ai-audit/2024-09-23-ai-audit-launch.mdx
index 16fa2ab301ff9de..317488e0f220cd1 100644
--- a/src/content/changelog/ai-audit/2024-09-23-ai-audit-launch.mdx
+++ b/src/content/changelog/ai-audit/2024-09-23-ai-audit-launch.mdx
@@ -9,7 +9,7 @@ Every site on Cloudflare now has access to [**AI Audit**](/ai-audit/), which sum
You can use this data to:
- Understand how and how often crawlers access your site (and which content is the most popular).
-- Block some or all of the AI bots accessing your site.
+- Block specific AI bots from accessing your site.
- Use Cloudflare to enforce your `robots.txt` policy via an automatic WAF rule.

diff --git a/src/content/changelog/ai-audit/2025-06-30-refresh.mdx b/src/content/changelog/ai-audit/2025-06-30-refresh.mdx
new file mode 100644
index 000000000000000..6411d73c1fe5c6d
--- /dev/null
+++ b/src/content/changelog/ai-audit/2025-06-30-refresh.mdx
@@ -0,0 +1,19 @@
+---
+title: AI Audit refresh
+description: More intuitive AI crawler management
+date: 2025-07-01T11:00:00Z
+---
+
+We redesigned the AI Audit dashboard to provide more intuitive and granular control over AI crawlers.
+
+- From the new **AI Crawlers** tab: block specific AI crawlers.
+- From the new **Metrics** tab: view AI Audit metrics.
+
+
+
+
+
+To get started, explore:
+
+- [Manage AI crawlers](/ai-audit/features/manage-ai-crawlers/).
+- [Analyze AI crawlers](/ai-audit/features/analyze-ai-crawlers/).
diff --git a/src/content/docs/ai-audit/features/analyze-ai-crawlers.mdx b/src/content/docs/ai-audit/features/analyze-ai-crawlers.mdx
new file mode 100644
index 000000000000000..7a6aa6fec86cc6e
--- /dev/null
+++ b/src/content/docs/ai-audit/features/analyze-ai-crawlers.mdx
@@ -0,0 +1,56 @@
+---
+title: Analyze AI crawlers
+pcx_content_type: concept
+sidebar:
+ order: 7
+---
+
+import { Tabs, TabItem } from "~/components";
+
+AI Audit metrics provide key insights into how AI crawlers interact with your website.
+
+To analyze AI crawlers:
+
+1. Log in to the [Cloudflare dashboard](https://dash.cloudflare.com/), and select your account and domain.
+2. Go to **AI Audit**.
+3. Go to the **Metrics** tab.
+
+## View AI Audit metrics
+
+AI Audit provides you with the following metrics to help you understand how AI crawlers are interacting with your website.
+
+| Metric | Description |
+| ---------------------------------- | ------------------------------------------------------------------------ |
+| Total requests | The total number of requests to crawl your website, from all AI crawlers |
+| Allowed requests | The number of requests you have allowed (by allowing AI crawlers) |
+| Blocked requests | The number of requests you have blocked (by blocking AI crawlers) |
+| Requests by AI crawlers | A graph which displays the number of crawl requests from each AI crawler |
+| Most popular paths by AI crawlers | The most popular pages crawled by AI crawlers, for each AI crawler |
+| Hosts | Top five most requested hosts (website domains) |
+| Paths | Top five most requested paths |
+| AI crawler operators | Top five most active AI crawler operators (by requests) |
+
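+If you want to cross-check these numbers outside the dashboard, the sketch below pulls the top requested paths for a zone from Cloudflare's [GraphQL Analytics API](/analytics/graphql-api/), which is roughly what the **Paths** metric summarizes. Treat it as a minimal, illustrative example rather than part of AI Audit: the dataset and dimension names (`httpRequestsAdaptiveGroups`, `clientRequestPath`) and the 24-hour window are assumptions you may need to adapt for your zone and plan.
+
+```ts
+// Sketch: top requested paths for a zone over the past 24 hours, queried from
+// the GraphQL Analytics API. Requires an API token with Analytics read access.
+const ZONE_TAG = "<YOUR_ZONE_TAG>";
+const API_TOKEN = process.env.CLOUDFLARE_API_TOKEN;
+
+const end = new Date();
+const start = new Date(end.getTime() - 24 * 60 * 60 * 1000);
+
+const query = `
+  {
+    viewer {
+      zones(filter: { zoneTag: "${ZONE_TAG}" }) {
+        httpRequestsAdaptiveGroups(
+          filter: { datetime_geq: "${start.toISOString()}", datetime_leq: "${end.toISOString()}" }
+          orderBy: [count_DESC]
+          limit: 10
+        ) {
+          count
+          dimensions {
+            clientRequestPath
+          }
+        }
+      }
+    }
+  }
+`;
+
+const response = await fetch("https://api.cloudflare.com/client/v4/graphql", {
+  method: "POST",
+  headers: {
+    Authorization: `Bearer ${API_TOKEN}`,
+    "Content-Type": "application/json",
+  },
+  body: JSON.stringify({ query }),
+});
+
+// Print the raw result; each entry pairs a request count with a path.
+console.log(JSON.stringify(await response.json(), null, 2));
+```
+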
+## Filter date range
+
+You can use the date filter to choose the period of time you wish to analyze.
+
+
+
+
+Filter options:
+
+- Past 24 hours
+
+
+
+Filter options:
+
+- Past 24 hours
+- Past 7 days
+- Past 14 days
+- Past month
+
+
+
+
+The values of the AI Audit metrics will update according to your filter.
\ No newline at end of file
diff --git a/src/content/docs/ai-audit/features/detect-ai-crawlers.mdx b/src/content/docs/ai-audit/features/detect-ai-crawlers.mdx
deleted file mode 100644
index 21a9ffff7639548..000000000000000
--- a/src/content/docs/ai-audit/features/detect-ai-crawlers.mdx
+++ /dev/null
@@ -1,46 +0,0 @@
----
-title: Detect AI crawlers
-pcx_content_type: concept
-sidebar:
- order: 2
----
-
-AI Audit metrics provides you with insight on how AI crawlers are interacting with your website.
-
-## View AI Audit metrics
-
-AI Audit provides you with the following metrics to help you understand how AI crawlers are interacting with your website.
-
-| Metric | Description |
-| --------------------------------- | ------------------------------------------------------------------------ |
-| Request by AI crawlers | A graph which displays the number of crawl requests from each AI crawler |
-| Summary | A list of AI crawlers with the most number of crawl requests |
-| Most popular paths by AI crawlers | The most popular pages crawled by AI crawlers, for each AI crawler |
-
-The **Summary** table also enables you to [Enforce your robots.txt](/ai-audit/features/enforce-robots-txt/).
-
-## Filter AI crawler data
-
-You can use filters to narrow the scope of your result.
-
-- **Provider:** Filter by the AI crawler owners.
-- **Bot type:** Filter by the type of the AI bot (for example, AI crawler, AI assistant, or archiver).
-- **Date range:** Filter the date range of your results. You can choose from three predetermined date ranges:
- - Past 7 days
- - Past 14 days
- - Past month
-
-The values of the AI Audit metrics will update according to your filter.
-
-## Filter subdomains
-
-You can use the subdomain filter to narrow the scope of your result.
-
-From the dropdown, select either **All subdomains**, or the specific subdomain you wish to view.
-
-Selecting a specific subdomain allows you to access:
-
-- **Violations only** toggle: Toggles the AI Audit page to only display bots which are violating your configured rules.
-- [**Enforce robots.txt policy**](/ai-audit/features/enforce-robots-txt/): Ensure bots cannot access webpages which are off-limits, as specified in your `robots.txt` file.
-
-The values of the AI Audit metrics will update according to your filter.
diff --git a/src/content/docs/ai-audit/features/enforce-robots-txt.mdx b/src/content/docs/ai-audit/features/enforce-robots-txt.mdx
deleted file mode 100644
index b5b0d2e52572b6b..000000000000000
--- a/src/content/docs/ai-audit/features/enforce-robots-txt.mdx
+++ /dev/null
@@ -1,37 +0,0 @@
----
-title: Enforce robots.txt
-pcx_content_type: concept
-sidebar:
- order: 5
----
-
-import { Steps } from "~/components";
-
-AI Audit allows you to enforce [`robots.txt`](/radar/glossary/#robotstxt) which instructs bots which webpages they can and cannot access.
-
-To enforce `robots.txt`:
-
-
-1. Log in to the [Cloudflare dashboard](https://dash.cloudflare.com/), and select your account and domain.
-2. Go to **AI Audit**.
-3. From the dropdown at the top of the page, select a specific subdomain where you wish you enforce `robots.txt`.
-4. From **Summary**, select **Enforce robots.txt policy**.
-5. From the **Enforce your robots.txt policy** page, select **Go to WAF custom rules**.
-6. From the **New custom rule** page, name your custom rule.
- - The page will automatically populate the values for the custom rule.
-7. From **Then take action...**:
- - For **Choose action**, select **Block**.
- - For **With response type**, select **Default Cloudflare WAF block page**.
-8. From **Place at**:
- - For **Select order**, select **Last**.
-9. Select **Deploy**.
-
-
-This custom rule ensures that bots cannot access the pages specified in your `robots.txt` file.
-
-## Related resources
-
-For more information, refer to the following resources.
-
-- [What is robots.txt? | How a robots.txt file works](https://www.cloudflare.com/en-gb/learning/bots/what-is-robots-txt/)
-- [Direct AI crawlers with managed robots.txt](/bots/additional-configurations/managed-robots-txt/)
diff --git a/src/content/docs/ai-audit/features/manage-ai-crawlers.mdx b/src/content/docs/ai-audit/features/manage-ai-crawlers.mdx
new file mode 100644
index 000000000000000..951a9289b2fc94a
--- /dev/null
+++ b/src/content/docs/ai-audit/features/manage-ai-crawlers.mdx
@@ -0,0 +1,59 @@
+---
+title: Manage AI crawlers
+pcx_content_type: concept
+sidebar:
+ order: 2
+---
+
+import { Steps, GlossaryTooltip, Tabs, TabItem } from "~/components";
+
+AI Audit enables you to take specific action for each AI crawler.
+
+To manage AI crawlers:
+
+1. Log in to the [Cloudflare dashboard](https://dash.cloudflare.com/), and select your account and domain.
+2. Go to **AI Audit**.
+3. Go to the **AI Crawlers** tab.
+
+## View the list of AI crawlers
+
+The **AI Crawlers** tab displays a table of AI crawlers that are requesting access to your content, and how they interact with your pages. The table provides the following information, depending on the type of plan you are on.
+
+
+
+| Column | Details |
+| --------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+| AI Crawlers | The name of the AI crawler. |
+| Operator | The name of the entity who owns the AI crawler. Note that an operator may have multiple AI crawlers. |
+| Category | The category of the AI crawler. Refer to [Verified bot categories](/bots/concepts/bot/verified-bots/categories/). |
+| Block | Toggle for blocking specific AI crawlers. |
+
+
+
+| Column | Details |
+| --------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+| AI Crawlers | The name of the AI crawler. |
+| Operator | The name of the entity who owns the AI crawler. Note that an operator may have multiple AI crawlers. |
+| Category | The category of the AI crawler. Refer to [Verified bot categories](/bots/concepts/bot/verified-bots/categories/). |
+| Requests | The number of times the AI crawler has requested to crawl your content. |
+| Robots.txt violations | The number of times the AI crawler has violated your `robots.txt` file. |
+| Block | Toggle for blocking specific AI crawlers. |
+
+
+
+
+## Block access
+
+Blocking an AI crawler completely stops it from scraping your webpages.
+
+Use the toggles in the **Block** column to block specific AI crawlers from accessing your content.
+
+## Filter AI crawler data
+
+You can use filters to narrow the scope of your results.
+
+- **Name:** Search the name of the AI crawler.
+- **Operator:** Filter by the AI crawler operator.
+- **Category:** Filter by the category of the AI crawler (for example, AI crawler, AI assistant, or archiver).
+
+The values of the table will update according to your filter.
diff --git a/src/content/docs/ai-audit/get-started.mdx b/src/content/docs/ai-audit/get-started.mdx
index 06e8d8000707071..20298be86e00382 100644
--- a/src/content/docs/ai-audit/get-started.mdx
+++ b/src/content/docs/ai-audit/get-started.mdx
@@ -11,12 +11,12 @@ head:
description: Learn how to set up AI Audit.
---
-import { Details, Render, Steps } from "~/components";
+import { Details, Render, Steps, Tabs, TabItem, GlossaryTooltip } from "~/components";
This guide instructs you on how to:
- View AI crawlers that are interacting with pages in your domain (a [Cloudflare zone](/fundamentals/concepts/accounts-and-zones/#zones)).
-- Create a rule to block AI crawlers on your pages.
+- Use AI Audit to block individual crawlers from accessing your content.
## Prerequisites
@@ -24,63 +24,64 @@ This guide instructs you on how to:
2. [Connect your domain to Cloudflare](/fundamentals/manage-domains/add-site/).
3. Make sure your domain is [proxying traffic through Cloudflare](/fundamentals/concepts/how-cloudflare-works/#cloudflare-as-a-reverse-proxy).
-## 1. Block all AI crawlers
+## 1. Block specific AI crawlers
-To use AI Audit:
+
+
+
+To block only specific AI crawlers:
{/* prettier-ignore */}
-1. Log in to the [Cloudflare dashboard](https://dash.cloudflare.com/), and select your account and domain.
-2. Go to **AI Audit**.
-3. From **Most Popular Paths**, select **Block All**.
-4. From the **Bot traffic** page, under **Block AI Bots**, select **Enable**.
+1. Go to **AI Audit**.
+2. From the **AI Crawlers** tab, go to the list of AI crawlers.
+3. Review the AI crawlers that are accessing your domain.
+4. From the **Block** column, toggle the switch for AI crawlers you wish to block.
-This feature also enables Cloudflare's [Bot Fight Mode: Block AI bots](/bots/get-started/bot-fight-mode/#block-ai-bots).
-
-You can also create more complex rules when taking action on AI crawlers. For more information on creating more specific rules, refer to [Create a custom rule in the dashboard
-](/waf/custom-rules/create-dashboard/).
-
-
-
-
-
-:::note
-
-For more details on how this rule interacts with other Cloudflare settings, refer to [How it works](/bots/concepts/bot/#how-it-works).
+:::note[Quality of AI crawler detection]
+On the free plan, AI Audit identifies AI crawlers based on their [user agent strings](https://developer.mozilla.org/en-US/docs/Web/HTTP/Reference/Headers/User-Agent). This enables AI Audit to detect well-known AI crawlers.
+Upgrade your plan to enable more thorough detection using Cloudflare's [Bot Management detection ID](/bots/reference/bot-management-variables/#ruleset-engine-fields) field.
:::
-## 2. Block specific bot categories (Enterprise plan only)
+
-Customers on the Enterprise plan -- and with a [Bot Management subscription](/bots/plans/bm-subscription/) -- can choose to only block specific AI crawlers, while allowing others.
+To block only specific AI crawlers:
{/* prettier-ignore */}
-1. Go to the **AI Audit**.
-2. From **Most Popular Paths**, select **Block Some**.
-3. From the **Security rules** page, select **Create rule** > **Custom rules**.
-4. Provide a name for the custom rule. For example, "Block unwanted AI crawlers".
-5. From the **Field** dropdown, select **Verified Bot Category**.
-6. From the **Value** dropdown, select the specific bot category you wish to block.
- - You can use **And** / **Or** buttons to add additional conditions. For example, you can use multiple **Or** options to include multiple bot categories in the same rule.
-7. From the **Then take action...** section:
- - For **Choose action**, select **Block**.
- - For **With response type**, select **Default Cloudflare WAF block page**.
-8. From the **Place at** section:
- - For **Select order**, select **First**.
-9. Select **Save**.
+1. Go to **AI Audit**.
+2. From the **AI Crawlers** tab, go to the list of AI crawlers.
+3. Review key information such as:
+ - Bot operator
+ - Number of requests sent by the AI crawler
+   - Whether your `robots.txt` file allows the AI crawler, or how many times the crawler has violated it.
+4. From the **Block** column, toggle the switch for AI crawlers you wish to block.
-This custom rule will only block the AI bots which belong to the [verified bot categories](/bots/concepts/bot/verified-bots/categories/) you have included in your rule (in step 6).
+
+
-For more information on creating a custom WAF rule, refer to [Create a custom rule in the dashboard](/waf/custom-rules/create-dashboard/).
+For more information, refer to [Manage AI crawlers](/ai-audit/features/manage-ai-crawlers/).
-## 3. Review detected AI crawlers
+You can also create more complex rules for AI crawlers using [Cloudflare WAF](/waf/). For more information, refer to [Create a custom rule in the dashboard](/waf/custom-rules/create-dashboard/).
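+
+If you prefer to manage rules through the API instead of the dashboard, the sketch below shows one possible flow using the [Rulesets API](/ruleset-engine/rulesets-api/): it looks up the zone's custom rules entrypoint ruleset and appends a block rule. The expression shown, `cf.verified_bot_category eq "AI Crawler"`, is only an example to adapt, and matching on verified bot categories may require a Bot Management subscription.
+
+```ts
+// Sketch: append a custom rule that blocks verified AI crawlers via the
+// Rulesets API. Adapt the expression to your needs; this is one example,
+// not the only way to block AI crawlers.
+const ZONE_ID = "<YOUR_ZONE_ID>";
+const API_TOKEN = process.env.CLOUDFLARE_API_TOKEN;
+const API = "https://api.cloudflare.com/client/v4";
+const headers = {
+  Authorization: `Bearer ${API_TOKEN}`,
+  "Content-Type": "application/json",
+};
+
+// 1. Find the zone's entrypoint ruleset for the custom rules phase.
+//    (If the zone has no custom rules yet, this entrypoint may not exist;
+//    create a first rule in the dashboard or via the API before reading it.)
+const entrypoint = await fetch(
+  `${API}/zones/${ZONE_ID}/rulesets/phases/http_request_firewall_custom/entrypoint`,
+  { headers },
+).then((r) => r.json());
+
+const rulesetId = entrypoint.result.id;
+
+// 2. Append a rule that blocks requests from verified AI crawlers.
+const created = await fetch(`${API}/zones/${ZONE_ID}/rulesets/${rulesetId}/rules`, {
+  method: "POST",
+  headers,
+  body: JSON.stringify({
+    description: "Block verified AI crawlers",
+    expression: '(cf.verified_bot_category eq "AI Crawler")',
+    action: "block",
+  }),
+});
+
+console.log(await created.json());
+```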
-Review the AI crawlers detected on your site in the **Metrics** tab of the Cloudflare dashboard for key metrics.
+## 2. Review detected AI crawlers
+
+
+
+
+Review the AI crawlers detected on your site.
+
+
+1. Go to **AI Audit**.
+2. From the **Metrics** tab, view key metrics on AI crawlers.
+
+
+
-Refer to [Detect AI crawlers](/ai-audit/features/detect-ai-crawlers/) for more information.
+Note that on free plans, the **Metrics** tab only displays metrics for the past 24 hours.
## Related resources
diff --git a/src/content/docs/ai-audit/index.mdx b/src/content/docs/ai-audit/index.mdx
index cddfcd2cc40d7e2..1cc41e788a95833 100644
--- a/src/content/docs/ai-audit/index.mdx
+++ b/src/content/docs/ai-audit/index.mdx
@@ -33,19 +33,19 @@ AI Audit is currently only available as a beta product.
## Features
- Displays information about AI crawlers in your domains' pages.
+ Allow or block individual AI crawlers.
- Enforce your `robots.txt` with a Cloudflare WAF rule.
+ Analyze how AI crawlers interact with your domains.
---
diff --git a/src/content/docs/ai-audit/reference/glossary.mdx b/src/content/docs/ai-audit/reference/glossary.mdx
new file mode 100644
index 000000000000000..0060ea697e269c4
--- /dev/null
+++ b/src/content/docs/ai-audit/reference/glossary.mdx
@@ -0,0 +1,13 @@
+---
+title: Glossary
+pcx_content_type: glossary
+sidebar:
+ order: 12
+
+---
+
+import { Glossary } from "~/components"
+
+Review the definitions for terms used across Cloudflare's AI Audit documentation.
+
+
diff --git a/src/content/docs/ai-audit/reference/index.mdx b/src/content/docs/ai-audit/reference/index.mdx
new file mode 100644
index 000000000000000..f4a7b3757467823
--- /dev/null
+++ b/src/content/docs/ai-audit/reference/index.mdx
@@ -0,0 +1,12 @@
+---
+title: Reference
+pcx_content_type: navigation
+sidebar:
+ group:
+ hideIndex: true
+ order: 10
+---
+
+import { DirectoryListing } from "~/components";
+
+
diff --git a/src/content/glossary/ai-audit.yaml b/src/content/glossary/ai-audit.yaml
new file mode 100644
index 000000000000000..526bd02b39a22f9
--- /dev/null
+++ b/src/content/glossary/ai-audit.yaml
@@ -0,0 +1,8 @@
+---
+productName: AI Audit
+entries:
+ - term: robots.txt
+ general_definition: |-
+      A text file which lists pages in your website that are off-limits for bots. Well-behaved bots respect this file, but some bots may violate it. You can enforce your `robots.txt` policy with [Cloudflare WAF custom rules](/waf/custom-rules/create-dashboard/).
+
+
diff --git a/src/content/release-notes/bots.yaml b/src/content/release-notes/bots.yaml
index 989dfb305fcf3de..11b7afcb5e826ae 100644
--- a/src/content/release-notes/bots.yaml
+++ b/src/content/release-notes/bots.yaml
@@ -11,7 +11,7 @@ entries:
- publish_date: "2025-05-08"
title: Machine Learning model v9 is now the default model
description: |-
- [Machine Learning model v9](/bots/reference/machine-learning-models/#model-versions-and-release-notes) is now the default model for all new zones and existing zones set to use the latest machine learning model.
+ [Machine Learning model v9](/bots/reference/machine-learning-models/#model-versions-and-release-notes) is now the default model for all new zones and existing zones set to use the latest machine learning model.
- publish_date: "2025-04-28"
title: Managed robots.txt is now available
description: |-