Skip to content

Commit aefdd2f

Browse files
authored
Merge branch 'cloudflare:production' into production
2 parents db2047e + 513a16a commit aefdd2f

File tree

44 files changed

+628
-1043
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

44 files changed

+628
-1043
lines changed

public/__redirects

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1628,6 +1628,7 @@
16281628
/turnstile/troubleshooting/troubleshooting-faqs/ /turnstile/frequently-asked-questions/#troubleshooting 301
16291629
/turnstile/tutorials/protecting-your-payment-form-from-attackers-bots-using-turnstile/ /developer-spotlight/ 301
16301630
/turnstile/frequently-asked-questions/ /turnstile/ 301
1631+
/turnstile/tutorials/implicit-vs-explicit-rendering/ /turnstile/get-started/client-side-rendering/ 301
16311632

16321633
# waf
16331634
/waf/about/ /waf/concepts/ 301
Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,14 @@
1+
---
2+
import { z } from "astro:schema";
3+
4+
const props = z.object({
5+
key: z.string(),
6+
mode: z.enum(["append", "replace"]).default("append"),
7+
});
8+
9+
const { key, mode } = props.parse(Astro.props);
10+
---
11+
12+
<div data-extra-flag-key={key} data-extra-flag-mode={mode}>
13+
<slot />
14+
</div>

src/components/WranglerArg.astro

Lines changed: 13 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,9 +8,13 @@ import MetaInfo from "./MetaInfo.astro";
88
const props = z.object({
99
key: z.string(),
1010
definition: z.custom<any>(),
11+
extraDetails: z.object({
12+
content: z.string(),
13+
mode: z.enum(["append", "replace"])
14+
}).optional(),
1115
});
1216
13-
const { key, definition } = props.parse(Astro.props);
17+
const { key, definition, extraDetails } = props.parse(Astro.props);
1418
1519
const type = definition.type ?? definition.choices;
1620
const description = definition.description ?? definition.describe;
@@ -46,5 +50,12 @@ if (alias) {
4650
{aliasText && <MetaInfo text={aliasText} />}
4751
{required && <MetaInfo text="required" />}
4852
{defaultValue !== undefined && <MetaInfo text={`default: ${defaultValue}`} />}
49-
<Fragment set:html={marked.parse(sanitizedDescription)} />
53+
{extraDetails?.mode === "replace" ? (
54+
<Fragment set:html={marked.parse(extraDetails.content)} />
55+
) : (
56+
<>
57+
<Fragment set:html={marked.parse(sanitizedDescription)} />
58+
{extraDetails && <Fragment set:html={marked.parse(extraDetails.content)} />}
59+
</>
60+
)}
5061
</li>

src/components/WranglerCommand.astro

Lines changed: 36 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@ import WranglerArg from "./WranglerArg.astro";
66
import Details from "./Details.astro";
77
import { marked } from "marked";
88
import { commands, getCommand } from "~/util/wrangler";
9+
import { parse } from "node-html-parser";
910
1011
const props = z.object({
1112
command: z.string(),
@@ -19,6 +20,19 @@ const definition = getCommand(command);
1920
2021
description ??= definition.metadata.description;
2122
23+
// some commands are present but marked as "hidden" and shouldn't be shown
24+
let hidden = false;
25+
26+
if (definition.metadata.hidden) {
27+
hidden = true;
28+
}
29+
30+
if (hidden) {
31+
throw new Error(
32+
`[WranglerCommand] "${command}" is marked as hidden. If you want to publish, fix upstream in workers-sdk repo.`
33+
);
34+
}
35+
2236
if (!definition.args) {
2337
console.warn(`[WranglerCommand] "${command}" has no arguments`);
2438
}
@@ -30,6 +44,27 @@ const positionals = definition.positionalArgs
3044
.join(" ");
3145
3246
const positionalSet = new Set(definition.positionalArgs);
47+
48+
// Extract ExtraFlagDetails from slot
49+
const slotContent = await Astro.slots.render("default");
50+
const extraFlagDetailsMap = new Map<string, { content: string; mode: "append" | "replace" }>();
51+
52+
if (slotContent) {
53+
const html = parse(slotContent);
54+
const extraFlagElements = html.querySelectorAll("div[data-extra-flag-key]");
55+
56+
for (const element of extraFlagElements) {
57+
const key = element.getAttribute("data-extra-flag-key");
58+
const mode = element.getAttribute("data-extra-flag-mode") || "append";
59+
60+
if (key) {
61+
extraFlagDetailsMap.set(key, {
62+
content: element.innerHTML.trim(),
63+
mode: mode as "append" | "replace"
64+
});
65+
}
66+
}
67+
}
3368
---
3469

3570
<AnchorHeading depth={headingLevel} title={`\`${command}\``} />
@@ -53,6 +88,7 @@ const positionalSet = new Set(definition.positionalArgs);
5388
<WranglerArg
5489
key={key}
5590
definition={{ ...value, positional: positionalSet.has(key) }}
91+
extraDetails={extraFlagDetailsMap.get(key)}
5692
/>
5793
);
5894
})}

src/components/WranglerNamespace.astro

Lines changed: 10 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ import WranglerCommand from "./WranglerCommand.astro";
55
66
const props = z.object({
77
namespace: z.string(),
8-
headingLevel: z.number().default(2)
8+
headingLevel: z.number().default(2),
99
});
1010
1111
const { namespace, headingLevel } = props.parse(Astro.props);
@@ -22,6 +22,10 @@ const definitions: NonNullable<(typeof node)["definition"]>[] = [];
2222
2323
function flattenSubtree(node: (typeof registry)["subtree"]) {
2424
for (const value of node.values()) {
25+
// skip commands marked as hidden
26+
if (value.definition?.metadata?.hidden) {
27+
continue;
28+
}
2529
if (value.definition?.type === "command") {
2630
definitions.push(value.definition);
2731
} else {
@@ -37,11 +41,14 @@ flattenSubtree(node.subtree);
3741
definitions.map((definition) => {
3842
if (definition.type !== "command") {
3943
throw new Error(
40-
`[WranglerNamespace] Expected "command" but got "${definition.type}" for "${definition.command}"`,
44+
`[WranglerNamespace] Expected "command" but got "${definition.type}" for "${definition.command}"`
4145
);
4246
}
4347
return (
44-
<WranglerCommand command={definition.command.replace("wrangler ", "")} headingLevel={headingLevel} />
48+
<WranglerCommand
49+
command={definition.command.replace("wrangler ", "")}
50+
headingLevel={headingLevel}
51+
/>
4552
);
4653
})
4754
}

src/components/index.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@ export { default as Details } from "./Details.astro";
2020
export { default as DirectoryListing } from "./DirectoryListing.astro";
2121
export { default as Example } from "./Example.astro";
2222
export { default as ExternalResources } from "./ExternalResources.astro";
23+
export { default as ExtraFlagDetails } from "./ExtraFlagDetails.astro";
2324
export { default as Feature } from "./Feature.astro";
2425
export { default as FeatureTable } from "./FeatureTable.astro";
2526
export { default as Flex } from "./Flex.astro";
Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
1+
---
2+
title: New SaaS Security weekly digests with API CASB
3+
description: Cloudflare CASB now offers a weekly email digest, summarizing your organization's latest SaaS security findings, integration health, and content exposures.
4+
products:
5+
- casb
6+
date: 2025-11-14
7+
---
8+
9+
You can now stay on top of your SaaS security posture with the new **CASB Weekly Digest** notification. This opt-in email digest is delivered to your inbox every Monday morning and provides a high-level summary of your organization's Cloudflare API CASB findings from the previous week.
10+
11+
This allows security teams and IT administrators to get proactive, at-a-glance visibility into new risks and integration health without having to log in to the dashboard.
12+
13+
To opt in, navigate to **Manage Account** > **Notifications** in the Cloudflare dashboard to configure the **CASB Weekly Digest** alert type.
14+
15+
### Key capabilities
16+
17+
- **At-a-glance summary** — Review new high/critical findings, most frequent finding types, and new content exposures from the past 7 days.
18+
- **Integration health** — Instantly see the status of all your connected SaaS integrations (Healthy, Unhealthy, or Paused) to spot API connection issues.
19+
- **Proactive alerting** — The digest is sent automatically to all subscribed users every Monday morning.
20+
- **Easy to configure** — Users can opt in by enabling the notification in the Cloudflare dashboard under **Manage Account** > **Notifications**.
21+
22+
### Learn more
23+
24+
- Configure [notification preferences](/notifications/) in Cloudflare.
25+
26+
The CASB Weekly Digest notification is available to all Cloudflare users today.

src/content/docs/ai-crawl-control/index.mdx

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@ import {
1919
LinkButton,
2020
RelatedProduct,
2121
Card,
22+
Stream
2223
} from "~/components";
2324

2425
<Plan type="all" />
@@ -27,6 +28,12 @@ import {
2728

2829
Monitor and control how AI services access your website content.
2930

31+
<Stream
32+
id="c2f3d8aada64a53e6cc118e5af834601"
33+
title="Introduction to AI Crawl Control"
34+
thumbnail="1m37s"
35+
/>
36+
3037
</Description>
3138

3239
AI companies use web content to train their models and power AI applications. AI Crawl Control (formerly AI Audit) gives you visibility into which AI services are accessing your content, and provides tools to manage access according to your preferences.

src/content/docs/ai-gateway/tutorials/deploy-aig-worker.mdx

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -89,7 +89,7 @@ export default {
8989
};
9090
```
9191

92-
To make this work, you need to use [`wrangler secret put`](/workers/wrangler/commands/#put) to set your `OPENAI_API_KEY`. This will save the API key to your environment so your Worker can access it when deployed. This key is the API key you created earlier in the OpenAI dashboard:
92+
To make this work, you need to use [`wrangler secret put`](/workers/wrangler/commands/#secret-put) to set your `OPENAI_API_KEY`. This will save the API key to your environment so your Worker can access it when deployed. This key is the API key you created earlier in the OpenAI dashboard:
9393

9494
<PackageManagers type="exec" pkg="wrangler" args="secret put OPENAI_API_KEY" />
9595

src/content/docs/api-shield/security/schema-validation/index.mdx

Lines changed: 29 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,8 @@ Cloudflare has launched Schema validation 2.0. For help configuring the previous
1818

1919
You can migrate to Schema validation 2.0 manually by uploading your schemas to the new system.
2020

21+
---
22+
2123
## Process
2224

2325
<GlossaryTooltip term="API endpoint">Endpoints</GlossaryTooltip> must be added to [Endpoint Management](/api-shield/management-and-monitoring/endpoint-management/) for Schema validation to protect them. Uploading a schema via the Cloudflare dashboard will automatically add endpoints, or you can manually add them from [API Discovery](/api-shield/security/api-discovery/).
@@ -31,6 +33,8 @@ If you are uploading a schema via the API or Terraform, you must parse the schem
3133
To view the contents in your learned schema, refer to [Export a schema](/api-shield/management-and-monitoring/endpoint-management/schema-learning/#export-a-schema) in Endpoint Management.
3234
:::
3335

36+
---
37+
3438
### Add validation by uploading a schema
3539

3640
<Tabs syncKey="dashNewNav">
@@ -230,7 +234,6 @@ To change the default action:
230234
</TabItem>
231235
</Tabs>
232236

233-
234237
### Change the action of a single endpoint
235238

236239
You can change individual endpoint actions separately from the default action in Schema validation.
@@ -348,12 +351,16 @@ To delete currently uploaded or learned schemas:
348351
</TabItem>
349352
</Tabs>
350353

354+
---
355+
351356
## Specifications
352357

353358
Cloudflare currently only accepts [OpenAPI v3 schemas](https://spec.openapis.org/oas/v3.0.3.html). The accepted file formats are YAML (`.yml` or `.yaml` file extension) and JSON (`.json` file extension).
354359

355360
OpenAPI schemas generated by different tooling may not be specific enough to import to Schema validation. We recommend using a third-party tool such as [Swagger Editor](https://swagger.io/tools/swagger-editor/) to ensure that schemas are compliant to the OpenAPI specification.
356361

362+
---
363+
357364
## Limitations
358365

359366
Cloudflare API Shield's Schema validation (importing) and [Schema learning](/api-shield/management-and-monitoring/endpoint-management/schema-learning/) (exporting) capabilities rely on the [OpenAPI Specification (OAS) v3.0](https://spec.openapis.org/oas/v3.0.3).
@@ -452,6 +459,8 @@ Refer to the information below for more details on Schema validation's current s
452459
- [`uniqueItems`](https://spec.openapis.org/oas/v3.0.3#schema-object)
453460
- This field is currently not validated by Schema validation.
454461

462+
---
463+
455464
## Body inspection
456465

457466
API Shield has the ability to identify body specifications contained in uploaded schemas and validate that the data of incoming API requests adheres to them.
@@ -478,6 +487,25 @@ Cloudflare allows specifying the following media-ranges in the OpenAPI request b
478487

479488
Media-ranges can also be configured to enforce a `charset` parameter. For this, Cloudflare only accepts the `charset` parameter with a static value of `utf-8` as part of the media-range specification and when configured, we will similarly require the request's content-type to carry this charset.
480489

490+
---
491+
492+
## Troubleshooting
493+
494+
This section addresses common issues you may encounter when using schema validation.
495+
496+
### `OneOf` constraint error schema violation in Security Events
497+
498+
A `OneOf` constraint error means an API request failed schema validation because its body did not match exactly one of the options defined in a [`oneOf`](https://swagger.io/docs/specification/v3_0/data-models/oneof-anyof-allof-not/) list within your uploaded schema.
499+
500+
The request was invalid for one of two reasons:
501+
502+
- **Matches Zero**: The payload did not correctly match any of the available subschemas. This is common when a discriminator field is set, but the payload is missing other required fields for that type.
503+
- **Matches Multiple**: The payload was ambiguous and matched more than one subschema. This happens with generic schemas (for example, if a payload includes both an `email` and a `phone` field, it might match both an `email` and a `phone` schema definition, violating the "exactly one" rule).
504+
505+
To fix this, check the failing request body against the API schema definition. It will either be missing required fields for the intended type, or include properties from multiple conflicting types, making it ambiguous.
506+
507+
---
508+
481509
## Availability
482510

483511
Schema validation is available for all customers. Refer to [Plans](/api-shield/plans/) for more information based on your plan type.

0 commit comments

Comments
 (0)