diff --git a/docs/product/drains/index.mdx b/docs/product/drains/index.mdx
index 31bb0f022f11b..07a51846d0210 100644
--- a/docs/product/drains/index.mdx
+++ b/docs/product/drains/index.mdx
@@ -13,3 +13,5 @@ To send data to Sentry, we recommend using the Sentry SDKs. However we also supp
## Drains
- [Vercel](/product/drains/integration/vercel/)
+- [Cloudflare](/product/drains/integration/cloudflare/)
+- [Heroku](/product/drains/integration/heroku/)
diff --git a/docs/product/drains/integration/cloudflare.mdx b/docs/product/drains/integration/cloudflare.mdx
new file mode 100644
index 0000000000000..49fc7d115ca42
--- /dev/null
+++ b/docs/product/drains/integration/cloudflare.mdx
@@ -0,0 +1,100 @@
+---
+title: Cloudflare Workers Observability
+sidebar_order: 75
+description: Learn how to set up Cloudflare Workers Observability to forward logs and traces to Sentry.
+---
+
+Cloudflare Workers supports [exporting OpenTelemetry (OTEL)-compliant data](https://developers.cloudflare.com/workers/observability/exporting-opentelemetry-data/), which you can use to send logs and traces to Sentry:
+
+- Logs: Application logs including `console.log()` output and system-generated logs
+- Traces: Traces showing request flows through your Worker and connected services
+
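+For context, here's a minimal Worker sketch showing where those two signals come from once export is enabled (the `src/index.ts` filename and handler body are illustrative, not part of the destination setup):
+
+```typescript {filename:src/index.ts}
+export default {
+  async fetch(request: Request): Promise<Response> {
+    // Once the logs destination is enabled, console.log() output like this
+    // is forwarded to Sentry as a log record
+    console.log("handling request", request.url);
+
+    // The handled request itself shows up in the exported trace data
+    return new Response("Hello from my Worker!");
+  },
+};
+```
+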
+## Prerequisites
+
+Before you begin, ensure you have:
+
+- A deployed Worker that you want to monitor
+- A Sentry project you want to send data to
+
+## Step 1: Set up destination in the Cloudflare dashboard
+
+To set up a destination in the Cloudflare dashboard, navigate to your Cloudflare account's [Workers Observability](https://dash.cloudflare.com/?to=/:account/workers-and-pages/observability/pipelines) section. Then click **Add destination** and configure either a traces or logs destination.
+
+### Logs Destination
+
+To configure your logs destination, click **Add destination** and fill in the following:
+
+1. Destination Name: `sentry-logs` (or any descriptive name)
+2. Destination Type: Select Logs
+3. OTLP Endpoint: Your Sentry OTLP logs endpoint (e.g., `https://{HOST}/api/{PROJECT_ID}/integration/otlp/v1/logs`)
+
+```
+___OTLP_LOGS_URL___
+```
+
+4. Custom Headers: Add the Sentry authentication header:
+ - Header name: `x-sentry-auth`
+ - Header value: `sentry sentry_key={SENTRY_PUBLIC_KEY}` where `{SENTRY_PUBLIC_KEY}` is your Sentry project's public key
+
+```
+sentry sentry_key=___PUBLIC_KEY___
+```
+
+You can find your Sentry OTLP logs endpoint and authentication header value in your [Sentry Project Settings](https://sentry.io/settings/projects/) under **Client Keys (DSN)** > **OpenTelemetry (OTLP)**, in the **OTLP Logs Endpoint** section.
+
+### Traces Destination
+
+To configure your traces destination, click **Add destination** and fill in the following:
+
+1. Destination Name: `sentry-traces` (or any descriptive name)
+2. Destination Type: Select Traces
+3. OTLP Endpoint: Your Sentry OTLP traces endpoint (e.g., `https://{HOST}/api/{PROJECT_ID}/integration/otlp/v1/traces`)
+
+```
+___OTLP_TRACES_URL___
+```
+
+4. Custom Headers: Add the Sentry authentication header:
+ - Header name: `x-sentry-auth`
+ - Header value: `sentry sentry_key={SENTRY_PUBLIC_KEY}` where `{SENTRY_PUBLIC_KEY}` is your Sentry project's public key
+
+```
+sentry sentry_key=___PUBLIC_KEY___
+```
+
+You can find your Sentry OTLP traces endpoint and authentication header value in your [Sentry Project Settings](https://sentry.io/settings/projects/) under **Client Keys (DSN)** > **OpenTelemetry (OTLP)**, in the **OTLP Traces Endpoint** section.
+
+## Step 2: Configure your Worker
+
+With your destinations created in the Cloudflare dashboard, update your Worker's configuration to enable telemetry export.
+
+```toml {filename:wrangler.toml}
+[observability.traces]
+enabled = true
+# Must match the destination name in the dashboard
+destinations = [ "sentry-traces" ]
+
+[observability.logs]
+enabled = true
+# Must match the destination name in the dashboard
+destinations = [ "sentry-logs" ]
+```
+
+```jsonc {filename:wrangler.jsonc}
+{
+ "observability": {
+ "traces": {
+ "enabled": true,
+ // Must match the destination name in the dashboard
+ "destinations": ["sentry-traces"],
+ },
+ "logs": {
+ "enabled": true,
+ // Must match the destination name in the dashboard
+ "destinations": ["sentry-logs"],
+ },
+ },
+}
+```
+
+After updating your configuration, deploy your Worker for the changes to take effect.
diff --git a/docs/product/drains/integration/heroku.mdx b/docs/product/drains/integration/heroku.mdx
new file mode 100644
index 0000000000000..67e9230b979ff
--- /dev/null
+++ b/docs/product/drains/integration/heroku.mdx
@@ -0,0 +1,44 @@
+---
+title: Heroku Telemetry Drains
+sidebar_order: 75
+description: Learn how to set up Heroku Telemetry Drains to forward logs and traces to Sentry.
+---
+
+Heroku Telemetry supports [exporting OpenTelemetry (OTEL)-compliant data](https://devcenter.heroku.com/articles/heroku-telemetry), which you can use to send logs and traces to Sentry. Sending metrics is not supported yet.
+
+<Alert>
+
+Heroku telemetry drains are only available to [Fir](https://devcenter.heroku.com/articles/generations#fir)-generation apps and spaces. We are tracking support for Cedar-generation apps and spaces in [this issue](https://github.com/getsentry/sentry/issues/91727).
+
+</Alert>
+
+## Prerequisites
+
+Before you begin, ensure you have:
+
+- A Heroku app that you want to monitor
+- A Sentry project you want to send data to
+
+## Adding a Telemetry Drain
+
+To add a telemetry drain to an app or space, use the `heroku telemetry:add` command. For more details, see the [Heroku Telemetry documentation](https://devcenter.heroku.com/articles/working-with-heroku-telemetry-drains#add-a-telemetry-drain).
+
+When using the `heroku telemetry:add` command, you'll need to provide your Sentry project's OTLP endpoint URL and an `x-sentry-auth` header containing your public key. You can find both in your [Sentry Project Settings](https://sentry.io/settings/projects/) under **Client Keys (DSN)** > **OpenTelemetry (OTLP)**.
+
+```bash
+heroku telemetry:add ___OTLP_URL___ --app myapp --signals logs,traces --transport http --headers '{"x-sentry-auth":"sentry sentry_key=___PUBLIC_KEY___"}'
+```
+
+To add a telemetry drain to the entire space:
+
+```bash
+heroku telemetry:add ___OTLP_URL___ --space myspace --signals logs,traces --headers '{"x-sentry-auth":"sentry sentry_key=___PUBLIC_KEY___"}'
+```
+
+You can use the `--signals` flag to specify the signals you want to send to Sentry. Only `logs` and `traces` are supported.
+
+<Alert>
+
+Sentry only supports the `http` value for the `--transport` flag.
+
+</Alert>
diff --git a/docs/product/drains/integration/vercel.mdx b/docs/product/drains/integration/vercel.mdx
index 41e7da71d3f22..15c15c2faddaa 100644
--- a/docs/product/drains/integration/vercel.mdx
+++ b/docs/product/drains/integration/vercel.mdx
@@ -4,15 +4,26 @@ sidebar_order: 75
description: Learn how to set up Vercel drains to send forward logs and traces data to Sentry.
---
+Vercel Drains let you forward traces and logs from applications running on Vercel to Sentry.
+
+## Prerequisites
+
+Before you begin, ensure you have:
+
+- A Vercel project that you want to monitor
+- A Sentry project you want to send data to
+
+## Set up a Drain
+
To set up a Drain in Vercel you'll need to create a new Drain in the Vercel settings. For more information on Vercel Drains, please see the [Vercel drain documentation](https://vercel.com/docs/drains).
1. From the Vercel dashboard, go to **Team Settings > Drains** and click **Add Drain**.
2. Choose a data type. Currently only Logs and Traces are supported.
-- [Logs](#log-drains)
-- [Traces](#trace-drains)
+- [Logs](#log-drains): Runtime, build, and static logs from your deployments (supports custom endpoints and native integrations)
+- [Traces](#trace-drains): Distributed tracing data in OpenTelemetry format (supports custom endpoints and native integrations)
-## Log Drains
+### Log Drains
@@ -34,13 +45,13 @@ After selecting the Logs data type, you'll need to configure the drain to send d
- **Firewall**: Outputs log data from requests denied by [Vercel Firewall](https://vercel.com/docs/vercel-firewall) rules
4. Select which environments to drain from. You can choose to drain from all environments or select specific ones.
-5. Under the custom endpoint tab add the Sentry Vercel Log Drain Endpoint in the URL field. You can find the endpoint in your Sentry Project Settings under **Client Keys (DSN)** > **Vercel**. You can select either JSON or NDJSON encoding.
+5. Under the custom endpoint tab, add the Sentry Vercel Log Drain Endpoint in the URL field. You can find the endpoint in your [Sentry Project Settings](https://sentry.io/settings/projects/) under **Client Keys (DSN)** > **Vercel**. You can select either JSON or NDJSON encoding.
-```URL
+```
___VERCEL_LOG_DRAIN_URL___
```
-6. Click the Custom Headers toggle and add the Sentry Authentication Header. You'll also find the header value in your Sentry Project Settings under **Client Keys (DSN)** > **Vercel**.
+6. Click the Custom Headers toggle and add the Sentry Authentication Header. You'll also find the header value in your [Sentry Project Settings](https://sentry.io/settings/projects/) under **Client Keys (DSN)** > **Vercel**.
```
x-sentry-auth: sentry sentry_key=___PUBLIC_KEY___
@@ -48,19 +59,19 @@ x-sentry-auth: sentry sentry_key=___PUBLIC_KEY___
7. To test that the log drain is working, you can send a test log to your drain by clicking the Test button.
-## Trace Drains
+### Trace Drains
After selecting the Traces data type, you'll need to configure the drain to send data to Sentry.
1. Provide a name for your drain and select which projects should send data to your endpoint. You can choose all projects or select specific ones.
2. Configure the sampling rate to control the volume of data sent to your drain. We recommend sampling 100% of the data to ensure you get all the data you need.
-3. Under the custom endpoint tab add the Sentry Vercel Log Drain Endpoint in the URL field. You can find the endpoint in your Sentry Project Settings under **Client Keys (DSN)** > **OpenTelemetry (OTLP)** under the **OTLP Traces Endpoint** section.
+3. Under the custom endpoint tab, add the Sentry OTLP Traces Endpoint in the URL field. You can find the endpoint in your [Sentry Project Settings](https://sentry.io/settings/projects/) under **Client Keys (DSN)** > **OpenTelemetry (OTLP)**, in the **OTLP Traces Endpoint** section.
-```URL
+```
___OTLP_TRACES_URL___
```
-4. Click the Custom Headers toggle and add the Sentry Authentication Header. You'll also find the header value in your Sentry Project Settings under **Client Keys (DSN)** > **OpenTelemetry (OTLP)** under the **OTLP Traces Endpoint Headers** section.
+4. Click the Custom Headers toggle and add the Sentry Authentication Header. You'll also find the header value in your [Sentry Project Settings](https://sentry.io/settings/projects/) under **Client Keys (DSN)** > **OpenTelemetry (OTLP)**, in the **OTLP Traces Endpoint Headers** section.
```
x-sentry-auth: sentry sentry_key=___PUBLIC_KEY___
diff --git a/src/components/codeContext.tsx b/src/components/codeContext.tsx
index 52a72671ab95d..4fdf172cd8ff0 100644
--- a/src/components/codeContext.tsx
+++ b/src/components/codeContext.tsx
@@ -16,6 +16,7 @@ type ProjectCodeKeywords = {
ORG_SLUG: string;
OTLP_LOGS_URL: string;
OTLP_TRACES_URL: string;
+ OTLP_URL: string;
PROJECT_ID: number;
PROJECT_SLUG: string;
PUBLIC_DSN: string;
@@ -89,6 +90,7 @@ export const DEFAULTS: CodeKeywords = {
MINIDUMP_URL:
'https://o0.ingest.sentry.io/api/0/minidump/?sentry_key=examplePublicKey',
UNREAL_URL: 'https://o0.ingest.sentry.io/api/0/unreal/examplePublicKey/',
+ OTLP_URL: 'https://o0.ingest.sentry.io/api/0/integration/otlp',
OTLP_TRACES_URL: 'https://o0.ingest.sentry.io/api/0/integration/otlp/v1/traces',
OTLP_LOGS_URL: 'https://o0.ingest.sentry.io/api/0/integration/otlp/v1/logs',
VERCEL_LOG_DRAIN_URL: 'https://o0.ingest.sentry.io/api/0/integration/vercel/logs/',
@@ -143,16 +145,24 @@ const formatUnrealEngineURL = ({scheme, host, pathname, publicKey}: Dsn) => {
return `${scheme}${host}/api${pathname}/unreal/${publicKey}/`;
};
-const formatVercelLogDrainUrl = ({scheme, host, pathname}: Dsn) => {
- return `${scheme}${host}/api${pathname}/integration/vercel/logs/`;
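+// Builds the project-scoped `/api/{PROJECT_ID}/integration/` base path shared by the OTLP and Vercel drain endpoints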
+const formatIntegrationUrl = ({scheme, host, pathname}: Dsn) => {
+ return `${scheme}${host}/api${pathname}/integration/`;
};
-const formatOtlpTracesUrl = ({scheme, host, pathname}: Dsn) => {
- return `${scheme}${host}/api${pathname}/integration/otlp/v1/traces`;
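+// Project-level OTLP base endpoint; the traces and logs helpers below append their `/v1/<signal>` path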
+const formatOtlpUrl = (dsn: Dsn) => {
+ return `${formatIntegrationUrl(dsn)}otlp`;
};
-const formatOtlpLogsUrl = ({scheme, host, pathname}: Dsn) => {
- return `${scheme}${host}/api${pathname}/integration/otlp/v1/logs`;
+const formatOtlpTracesUrl = (dsn: Dsn) => {
+ return `${formatOtlpUrl(dsn)}/v1/traces`;
+};
+
+const formatOtlpLogsUrl = (dsn: Dsn) => {
+ return `${formatOtlpUrl(dsn)}/v1/logs`;
+};
+
+const formatVercelLogDrainUrl = (dsn: Dsn) => {
+ return `${formatIntegrationUrl(dsn)}vercel/logs/`;
};
const formatApiUrl = ({scheme, host}: Dsn) => {
@@ -247,6 +257,7 @@ export async function fetchCodeKeywords(): Promise<CodeKeywords> {
parsedDsn.host ?? `o${project.organizationId}.ingest.sentry.io`,
MINIDUMP_URL: formatMinidumpURL(parsedDsn),
UNREAL_URL: formatUnrealEngineURL(parsedDsn),
+ OTLP_URL: formatOtlpUrl(parsedDsn),
VERCEL_LOG_DRAIN_URL: formatVercelLogDrainUrl(parsedDsn),
OTLP_TRACES_URL: formatOtlpTracesUrl(parsedDsn),
OTLP_LOGS_URL: formatOtlpLogsUrl(parsedDsn),