diff --git a/CODEOWNERS b/CODEOWNERS
index 19baa24204..78e3ce8044 100644
--- a/CODEOWNERS
+++ b/CODEOWNERS
@@ -47,7 +47,7 @@ CODEOWNERS @segmentio/segment-doc-team
/src/unify @pwseg
# Protocols owners
-/src/protocols @forstisabella @pwseg
+/src/protocols @forstisabella
# Storage owners
-/src/connections/storage @forstisabella @pwseg
+/src/connections/storage @forstisabella
diff --git a/src/_data/catalog/destination_categories.yml b/src/_data/catalog/destination_categories.yml
index 29314632ea..f53edc30bb 100644
--- a/src/_data/catalog/destination_categories.yml
+++ b/src/_data/catalog/destination_categories.yml
@@ -1,5 +1,5 @@
# AUTOGENERATED FROM PUBLIC API. DO NOT EDIT
-# destination categories last updated 2024-11-26
+# destination categories last updated 2024-12-05
items:
- display_name: A/B Testing
slug: a-b-testing
diff --git a/src/_data/catalog/destinations.yml b/src/_data/catalog/destinations.yml
index 82eeaa97e7..dcdf0ce258 100644
--- a/src/_data/catalog/destinations.yml
+++ b/src/_data/catalog/destinations.yml
@@ -1,5 +1,5 @@
# AUTOGENERATED FROM PUBLIC API. DO NOT EDIT
-# destination data last updated 2024-11-26
+# destination data last updated 2024-12-05
items:
- id: 637e8d185e2dec264895ea89
display_name: 1Flow
@@ -108163,6 +108163,131 @@ items:
actions: []
presets: []
partnerOwned: false
+- id: 6720ddceaa24532723b39d63
+ display_name: Singlestore
+ name: Singlestore
+ slug: singlestore
+ hidden: false
+ endpoints:
+ - US
+ regions:
+ - us-west-2
+ - eu-west-1
+ url: connections/destinations/catalog/singlestore
+ previous_names:
+ - Singlestore
+ website: https://www.singlestore.com
+ status: PUBLIC_BETA
+ categories:
+ - Raw Data
+ - Analytics
+ logo:
+ url: https://cdn-devcenter.segment.com/c4bfae1b-e5a6-43ff-8277-d0cf02b54114.svg
+ mark:
+ url: https://cdn-devcenter.segment.com/04b8886a-5fe7-4c05-aab8-4436cd43fbce.svg
+ methods:
+ track: true
+ identify: true
+ group: true
+ alias: true
+ screen: false
+ page: true
+ platforms:
+ browser: true
+ mobile: false
+ server: true
+ warehouse: false
+ cloudAppObject: false
+ linkedAudiences: true
+ components: []
+ browserUnbundlingSupported: false
+ browserUnbundlingPublic: false
+ replay: false
+ connection_modes:
+ device:
+ web: false
+ mobile: false
+ server: false
+ cloud:
+ web: true
+ mobile: false
+ server: true
+ settings:
+ - name: dbName
+ type: string
+ defaultValue: ''
+ description: The name of the database.
+ required: true
+ label: Database Name
+ - name: environment
+ type: select
+ defaultValue: Prod
+ description: The environment of the Singlestore database.
+ required: true
+ label: Environment
+ - name: host
+ type: string
+ defaultValue: ''
+ description: The host of the Singlestore database.
+ required: true
+ label: Host
+ - name: password
+ type: password
+ defaultValue: ''
+ description: The password of the Singlestore database.
+ required: true
+ label: Password
+ - name: port
+ type: number
+ defaultValue: 3306
+ description: The port of the Singlestore database.
+ required: true
+ label: Port
+ - name: username
+ type: string
+ defaultValue: ''
+ description: The username of the Singlestore database.
+ required: true
+ label: Username
+ actions:
+ - id: xxgUnx4BHqGLDYCJ4TiWPW
+ name: Send Data
+ slug: send
+ description: Send data to Singlestore.
+ platform: CLOUD
+ hidden: false
+ defaultTrigger: >-
+ type = "track" or type = "screen" or type = "identify" or type = "page" or
+ type = "group" or type = "alias"
+ fields:
+ - id: o4FSHmgGNDZjGayk4VpDqK
+ sortOrder: 0
+ fieldKey: database
+ label: Database
+ type: STRING
+ description: The name of the SingleStore database to send data to.
+ placeholder: ''
+ required: true
+ multiple: false
+ choices: null
+ dynamic: true
+ allowNull: false
+ - id: pV6xn9yXzxddMKs6EU2Ps2
+ sortOrder: 1
+ fieldKey: message
+ label: Message
+ type: OBJECT
+ description: The complete event payload.
+ placeholder: ''
+ defaultValue:
+ '@path': $.
+ required: true
+ multiple: false
+ choices: null
+ dynamic: false
+ allowNull: false
+ presets: []
+ partnerOwned: true
- id: 5c768ec31413290001ebdd2e
display_name: Singular
name: Singular
@@ -113797,6 +113922,12 @@ items:
mobile: false
server: true
settings:
+ - name: audience_identifier
+ type: select
+ defaultValue: computation_key
+ description: The audience identifier from your Taboola account.
+ required: false
+ label: Audience Identifier
- name: client_id
type: string
defaultValue: ''
@@ -113818,7 +113949,7 @@ items:
hidden: false
defaultTrigger: type = "track"
fields:
- - id: 5LsQp4x4a2mc1wS4x3DyGK
+ - id: hYPMUogwA6UXaPijimwgGy
sortOrder: 3
fieldKey: user_email
label: Email address
@@ -113839,7 +113970,7 @@ items:
dynamic: false
allowNull: false
hidden: false
- - id: idcYjf4xSRKXSH3vaBhuxv
+ - id: mrsc76C98Fes6horASeqQU
sortOrder: 5
fieldKey: enable_batching
label: Batch events
@@ -113854,7 +113985,7 @@ items:
choices: null
dynamic: false
allowNull: false
- - id: kGx5FWvwYVYNB7jgs8Jx4W
+ - id: 5gysPJ3QfJvkihzwjJsW1x
sortOrder: 6
fieldKey: device_id
label: Mobile Device ID
diff --git a/src/_data/catalog/destinations_private.yml b/src/_data/catalog/destinations_private.yml
index 3aea21e3c8..541ad04794 100644
--- a/src/_data/catalog/destinations_private.yml
+++ b/src/_data/catalog/destinations_private.yml
@@ -1,5 +1,5 @@
# AUTOGENERATED FROM PUBLIC API. DO NOT EDIT
-# destination data last updated 2024-11-26
+# destination data last updated 2024-12-05
items:
- id: 54521fd925e721e32a72eee1
display_name: Pardot
@@ -670,14 +670,24 @@ items:
browserUnbundlingPublic: false
replay: false
settings:
- - name: apiKey
+ - name: accountid
+ type: string
+ defaultValue: ''
+ description: Rokt ID assigned to your particular account.
+ required: true
+ label: Rokt Account ID
+ - name: rpub
+ type: string
+ defaultValue: ''
+ description: Rokt public key, starts with `rpub-`
+ required: true
+ label: Rokt public key
+ - name: rsec
type: password
defaultValue: ''
- description: >-
- APIKey used for Rokt API authorization before sending custom audiences
- data
+ description: Rokt secret key, starts with `rsec-`
required: true
- label: API Key provided by Rokt integration
+ label: Rokt secret key
actions:
- id: oNpU37CaJbuxq7PqBGEYz4
name: Sync Engage Audience to Rokt
@@ -689,7 +699,7 @@ items:
hidden: false
defaultTrigger: type = "track" or type = "identify"
fields:
- - id: hkunPAdr3JHSMDGCW8Hr7P
+ - id: aCdquof3Bd6c3wug29BeEY
sortOrder: 0
fieldKey: custom_audience_name
label: Custom Audience Name
@@ -704,7 +714,7 @@ items:
dynamic: false
allowNull: false
hidden: false
- - id: k8zCN8YH12e6N9EqeTbK3m
+ - id: 4qkzJunzrb3ZTtn5VEXgHo
sortOrder: 1
fieldKey: segment_computation_action
label: Segment Computation Action
@@ -721,7 +731,7 @@ items:
dynamic: false
allowNull: false
hidden: false
- - id: 4rPpCohUqos6N47caU35ct
+ - id: fq5EwYtoVfnBh5wRJ6YMhH
sortOrder: 2
fieldKey: email
label: Email
@@ -742,7 +752,7 @@ items:
dynamic: false
allowNull: false
hidden: false
- - id: ab4jivj6fXuTQ4TM68JERi
+ - id: ae6rFSFRB9dSGZNVAxPxjb
sortOrder: 3
fieldKey: traits_or_props
label: traits or properties object
@@ -765,7 +775,7 @@ items:
dynamic: false
allowNull: false
hidden: false
- - id: vH5iWCWZpV15WjgDFTnCXd
+ - id: btfk8jdv6uDRh7KT6dmcwC
sortOrder: 4
fieldKey: enable_batching
label: enable batching to rokt api
diff --git a/src/_data/catalog/source_categories.yml b/src/_data/catalog/source_categories.yml
index 4bd3e0e92a..6d3b0b5963 100644
--- a/src/_data/catalog/source_categories.yml
+++ b/src/_data/catalog/source_categories.yml
@@ -1,5 +1,5 @@
# AUTOGENERATED FROM PUBLIC API. DO NOT EDIT
-# source categories last updated 2024-11-26
+# source categories last updated 2024-12-05
items:
- display_name: A/B Testing
slug: a-b-testing
diff --git a/src/_data/catalog/sources.yml b/src/_data/catalog/sources.yml
index 0c171fa22f..2d9c96f9b5 100644
--- a/src/_data/catalog/sources.yml
+++ b/src/_data/catalog/sources.yml
@@ -1,5 +1,5 @@
# AUTOGENERATED FROM PUBLIC API. DO NOT EDIT
-# sources last updated 2024-11-26
+# sources last updated 2024-12-05
items:
- id: 8HWbgPTt3k
display_name: .NET
diff --git a/src/_includes/content/destination-dossier.html b/src/_includes/content/destination-dossier.html
index 9c7241aab1..f437f81bcf 100644
--- a/src/_includes/content/destination-dossier.html
+++ b/src/_includes/content/destination-dossier.html
@@ -59,6 +59,7 @@
Destination Info
This destination is not compatible with Destination Insert Functions.
{% endunless %}
{% endif %}
+ {% if thisDestination == '64c031541451bb784943f809' or thisDestination == '63e42d44b0a59908dc4cacc6' or thisDestination == '642440d46b66b3eeac42b581' %} This destination is not supported in EU workspaces. For more information, see the Regional Segment documentation. {% endif %}
{% if destinationInfo.status == "PUBLIC_BETA" %}This destination is in Beta{% endif %}
{% if page.engage == true %}This destination is only compatible with Twilio Engage.{% endif %}
diff --git a/src/api/public-api/fql.md b/src/api/public-api/fql.md
index 6811ee5cdd..58f439bd7c 100644
--- a/src/api/public-api/fql.md
+++ b/src/api/public-api/fql.md
@@ -7,10 +7,9 @@ redirect_from:
{% include content/papi-ga.html %}
+This reference provides a comprehensive overview of the Segment Destination Filter query language. For information on the Destination Filters API (including information on migrating from the Config API), visit the [Destination Filters API reference](https://docs.segmentapis.com/tag/Destination-Filters){:target="_blank"}.
-Destination Filter Reference documentation can be found in the [main Config API reference docs](https://reference.segmentapis.com/#6c12fbe8-9f84-4a6c-848e-76a2325cb3c5).
-
-The Transformations API uses Filter Query Language (FQL) to filter JSON objects and conditionally apply transformations. You can use FQL statements to:
+The [Transformations API](https://docs.segmentapis.com/tag/Transformations/){:target="_blank"} uses Filter Query Language (FQL) to filter JSON objects and conditionally apply transformations. You can use FQL statements to:
- Apply filters that evaluate to `true` or `false` based on the contents of each Segment event. If the statement evaluates to `true`, the transformation is applied, and if it is `false` the transformation is not applied.
- [Define new properties based on the result of an FQL statement](/docs/protocols/transform/#use-cases).
diff --git a/src/connections/destinations/catalog/actions-drip/index.md b/src/connections/destinations/catalog/actions-drip/index.md
new file mode 100644
index 0000000000..1c39a9e873
--- /dev/null
+++ b/src/connections/destinations/catalog/actions-drip/index.md
@@ -0,0 +1,26 @@
+---
+title: Drip (Actions) Destination
+id: 673b62169b3342fbe0fc28da
+---
+
+{% include content/plan-grid.md name="actions" %}
+
+[Drip](https://www.getdrip.com){:target="_blank"} is a nurture marketing platform that empowers B2C SMBs to convert long sales-cycle prospects into lifelong buyers with sophisticated, personalized marketing automation.
+
+This destination is maintained by Drip. For any issues with the destination, [contact their Support team](mailto:support@drip.com).
+
+## Getting started
+
+1. From your workspace's [Destination catalog page](https://app.segment.com/goto-my-workspace/destinations/catalog){:target="_blank”} search for "Drip (Actions)".
+2. Select Drip (Actions) and click **Add Destination**.
+3. Select an existing Source to connect to Drip (Actions).
+4. Go to the [Drip dashboard](https://www.getdrip.com/dashboard){:target="_blank"}.
+5. In the Settings tab, select **User Settings**, then find and copy the **API key** at the bottom of the page.
+6. In a terminal, run `echo <API_KEY>: | base64` (replacing `<API_KEY>` with the key you copied) to encode the API key.
+7. Enter the encoded **API Key** in the Drip destination settings in Segment.
+8. Your account ID is the seven-digit number in your browser's address bar when you're logged in to Drip. It's the number after `https://www.getdrip.com/`.
+9. Enter the **Account ID** in the Drip destination settings in Segment.
+
+{% include components/actions-fields.html %}
+
+For more information about developing with Drip, check out their [documentation](https://developer.drip.com/){:target="_blank”}.
diff --git a/src/connections/destinations/catalog/actions-google-enhanced-conversions/index.md b/src/connections/destinations/catalog/actions-google-enhanced-conversions/index.md
index c5362f9475..93f1ccfb97 100644
--- a/src/connections/destinations/catalog/actions-google-enhanced-conversions/index.md
+++ b/src/connections/destinations/catalog/actions-google-enhanced-conversions/index.md
@@ -13,7 +13,7 @@ hide_action:
name: "Call Conversion"
- id: mFUPoRTLRXhZ3sGbM8H3Qo
name: "Conversion Adjustment"
- - id: oWa5UioHjz5caK7t7tc57f
+ - id: h8sh7d7TUJYR1uv6RKZTGQ
name: 'Upload Enhanced Conversion (Legacy)'
---
diff --git a/src/connections/destinations/catalog/actions-intercom-web/index.md b/src/connections/destinations/catalog/actions-intercom-web/index.md
index f57cbbc62d..a8a31cad66 100644
--- a/src/connections/destinations/catalog/actions-intercom-web/index.md
+++ b/src/connections/destinations/catalog/actions-intercom-web/index.md
@@ -70,5 +70,36 @@ If you are seeing 404 responses in your browser's network tab, you've likely enc
- You set the wrong App ID on the Intercom Actions (Web) destination settings page.
- You set the wrong Regional Data Hosting value on the Intercom Actions (Web) destination settings page. Intercom gates regional endpoints by plan level, so you may not have access to EU data hosting.
-### Intercom does not support rETL event batching
-The Intercom (Web) Actions destination does not support the bulk contacts endpoint, and therefore is unable to support batching events in rETL.
+### Intercom does not support Reverse ETL event batching
+The Intercom (Web) Actions destination does not support the bulk contacts endpoint, and therefore is unable to support batching events in Reverse ETL.
+
+### Why are my Identify calls not updating or creating Intercom profiles, or not showing users as leads or visitors?
+Intercom requires requests to include user data or traits beyond `email` or `user_hash` in order to update or create profiles and to change a user's status from lead or visitor. Without additional user data or traits, Intercom assumes no changes were made to a user's data and doesn't send a "ping" request.
+
+In the following example, which only includes an `email` and `user_hash`, Intercom would not send a "ping" request or update the status of this user:
+
+```js
+analytics.identify("123");
+
+analytics.identify("123", { email: "example@domain.com" });
+
+analytics.identify("123",{email: "example@domain.com"}, {
+ integrations: {
+ Intercom: {
+ user_hash: "81b65b9abea0444437a5d92620f03acc33f04fabbc32da1e047260024f80566a"
+ }
+ }})
+```
+
+However, in the following example that also contains the `name` trait, Intercom sends a "ping" request and updates the status of this user:
+
+```js
+analytics.identify("123", {
+ email: "example@domain.com",
+ name: "John Doe"
+}, {
+ integrations: { Intercom: { user_hash: "hash" } }
+});
+```
+
+When sending calls to Intercom, always include a trait, like `name`. If you don't have a trait to send with Identify calls, map Segment's `timestamp` field to Intercom's `last_request_at` field.
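+
+As a reference for that mapping, here's a minimal sketch of an Identify payload showing the top-level `timestamp` field you could map to `last_request_at` (the values shown are illustrative):
+
+```json
+{
+  "type": "identify",
+  "userId": "123",
+  "traits": {
+    "email": "example@domain.com",
+    "name": "John Doe"
+  },
+  "timestamp": "2024-12-05T18:30:00.000Z"
+}
+```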
diff --git a/src/connections/destinations/catalog/actions-liveramp-audiences/index.md b/src/connections/destinations/catalog/actions-liveramp-audiences/index.md
index e24c330428..14a57bc97f 100644
--- a/src/connections/destinations/catalog/actions-liveramp-audiences/index.md
+++ b/src/connections/destinations/catalog/actions-liveramp-audiences/index.md
@@ -41,6 +41,9 @@ The LiveRamp Audiences destination can be connected to **Twilio Engage sources o
7. In the settings that appear in the side panel, toggle the Send Track option on and do not change the Audience Entered/Audience Exited event names. Click Save Settings
8. File a [support case](https://docs.liveramp.com/connect/en/considerations-when-uploading-the-first-file-to-an-audience.html#creating-a-support-case){:target="_blank"} with the LiveRamp team to configure and enable ingestion.
+> info "Mapping tester availability"
+> The Mapping Tester isn't available for this destination. Since this destination requires batched events for activation, testing can only be performed end-to-end with a connected source.
+
{% include components/actions-fields.html settings="false"%}
## Limitations
diff --git a/src/connections/destinations/catalog/actions-s3/index.md b/src/connections/destinations/catalog/actions-s3/index.md
new file mode 100644
index 0000000000..906f2cf0c6
--- /dev/null
+++ b/src/connections/destinations/catalog/actions-s3/index.md
@@ -0,0 +1,100 @@
+---
+title: AWS S3 (Actions) Destination
+hide-boilerplate: true
+hide-dossier: false
+id: 66eaa166f650644f04389e2c
+private: true
+beta: true
+# versions:
+# - name: AWS S3 (Classic)
+# link: /docs/connections/destinations/catalog/aws-s3/
+---
+{% include content/plan-grid.md name="actions" %}
+
+The AWS S3 (Actions) destination allows you to store event data as objects in a secure, scalable cloud storage solution. Each event is written to your S3 bucket, organized into a customizable folder structure such as by event type or timestamp. This makes it easy to manage, archive, and analyze data using downstream tools or AWS services.
+
+
+## Benefits of AWS S3 (Actions) vs AWS S3 Classic
+The AWS S3 Classic destination stored raw logs of the data Segment received directly in your S3 bucket. While this provided a straightforward data storage solution, users often needed to implement additional processing to standardize or transform these logs (in JSON format) for downstream analytics or integrations.
+
+The AWS S3 (Actions) destination enhances this capability by introducing configurable options to format and structure event data prior to storage. This new approach offers several key benefits:
+
+* **Standardized Data Formatting**. AWS S3 (Actions) lets you define a consistent output format for your data, either CSV or TXT, in a folder structure that you choose. The previous AWS S3 Classic destination only stored raw JSON payloads in a fixed folder called `segment-logs`.
+
+* **Configurable Data Translation**. AWS S3 (Actions) supports translation rules that can map raw event attributes to more meaningful or actionable representations. You can configure these rules to meet specific data schema requirements by either adding in custom columns or using the default ones.
+
+* **Enhanced Delivery Controls**. The destination provides advanced options for batch size controls and file naming conventions. These controls can help optimize efficiency and simplify data retrieval workflows.
+
+## Getting started
+Setting up the AWS S3 (Actions) destination is a straightforward process designed to help you configure and deploy standardized event data to your Amazon S3 bucket. Follow these steps to get started:
+
+### Prerequisites
+Ensure you have the following in place before configuring the AWS S3 (Actions) destination:
+
+- Amazon S3 Bucket: Create a bucket in your AWS account or use an existing one where you want to store the event data.
+- AWS IAM Permissions: Verify that you have appropriate IAM roles with write access to the S3 bucket and permissions for the Segment connection.
+- IAM Access IDs: Prepare your AWS IAM ARN ID and IAM External ID. These will be needed to authenticate and authorize Segment with your S3 bucket.
+
+
+### Step 1: Create an IAM role in the AWS console
+To set up the IAM role to properly authorize Segment with the AWS S3 (Actions) destination:
+
+1. Log in to your AWS account.
+2. Create a new S3 bucket or use an existing one. The IAM role you create in the next steps needs `PutObject`, `GetObject`, and `ListObject` access to this bucket.
+3. Navigate to **IAM > Roles > Create Role**.
+4. Provide the following policy permissions for the IAM role you just created:
+```json
+{
+ "Version": "2012-10-17",
+ "Statement": [
+ {
+ "Sid": "PutObjectsInBucket",
+ "Effect": "Allow",
+ "Action": [
+ "s3:PutObject",
+ "s3:PutObjectAcl"
+ ],
+ "Resource": "arn:aws:s3:::/*"
+ }
+ ]
+}
+```
+5. Click the **Trust Relationships** tab and edit the trust policy to allow the IAM user to assume the role. If you haven't already created an IAM user, refer to the AWS documentation to create one.
+```json
+{
+ "Version": "2012-10-17",
+ "Statement": [
+ {
+ "Sid": "",
+ "Effect": "Allow",
+ "Principal": {
+ "AWS":
+ "arn:aws:iam::595280932656:role/customer-s3-prod-action-destination-access"
+ },
+ "Action": "sts:AssumeRole",
+ "Condition": {
+ "StringEquals": {
+ "sts:ExternalId": ""
+ }
+ }
+ }
+ ]
+ }
+```
+### Step 2: Add the AWS S3 (Actions) Destination in Segment
+To finish the configuration, enable the AWS S3 (Actions) Destination in your workspace.
+
+1. Add the **AWS S3 (Actions)** destination from the Destinations tab of the catalog.
+2. Select the data source you want to connect to the destination.
+3. Provide a unique name for the destination.
+4. Complete the destination settings:
+ * Enter the name of the region in which the bucket you created above resides.
+ * Enter the name of the bucket you created above. Be sure to enter the bucket's **name**, not its URI.
+ * Enter the ARN of the IAM role you created above. The ARN should follow the format `arn:aws:iam::ACCOUNT_ID:role/ROLE_NAME`.
+ * Enter the IAM External ID, which is a value set in the Trust Relationship under your AWS IAM Role.
+5. Enable the destination.
+6. Verify that Segment data is stored in the S3 bucket by navigating to the bucket and folder path you configured in the AWS console.
+
+{% include components/actions-fields.html settings="true"%}
+
+
diff --git a/src/connections/destinations/catalog/actions-stackadapt-audiences/images/map-fields-example.png b/src/connections/destinations/catalog/actions-stackadapt-audiences/images/map-fields-example.png
new file mode 100644
index 0000000000..5823644fb8
Binary files /dev/null and b/src/connections/destinations/catalog/actions-stackadapt-audiences/images/map-fields-example.png differ
diff --git a/src/connections/destinations/catalog/actions-stackadapt-audiences/index.md b/src/connections/destinations/catalog/actions-stackadapt-audiences/index.md
new file mode 100644
index 0000000000..b5177e931d
--- /dev/null
+++ b/src/connections/destinations/catalog/actions-stackadapt-audiences/index.md
@@ -0,0 +1,71 @@
+---
+title: StackAdapt Audience Destination
+hide-boilerplate: true
+hide-dossier: true
+beta: true
+id: 66e96b9f4ee97f41caa06487
+hidden: true
+redirect_from: "/connections/destinations/catalog/actions-stackadapt-audiences/"
+---
+
+{% include content/plan-grid.md name="actions" %}
+
+[StackAdapt](https://www.stackadapt.com/){:target="_blank"} is a programmatic advertising platform specializing in audience engagement. StackAdapt enables marketers to deliver high-performing advertising campaigns across channels through real-time bidding, detailed audience targeting, and data-driven insights. StackAdapt’s integration with Twilio Engage helps you sync user data to optimize targeting and improve your campaign outcomes.
+
+This destination is maintained by StackAdapt. For any issues with the destination, please [submit a ticket to StackAdapt's support team](https://support.stackadapt.com/hc/en-us/requests/new?ticket_form_id=360006572593){:target="_blank"}.
+
+## Getting started
+
+### Getting your StackAdapt GraphQL Token
+
+If you do not have an existing StackAdapt API key, [reach out to the StackAdapt team for help](https://support.stackadapt.com/hc/en-us/requests/new?ticket_form_id=360006572593){:target="_blank"}.
+
+### Setting up the StackAdapt Audience destination in Segment Engage
+
+1. From the Segment web app, navigate to **Connections > Catalog > Destinations**.
+2. Search for and select the "StackAdapt Audience" destination.
+3. Click **Add Destination**.
+4. Select an existing Engage space source to connect to the StackAdapt Audience destination.
+5. Enter a name for your destination.
+6. On the Settings screen, provide your StackAdapt GraphQL API token.
+7. Toggle on the destination using the **Enable Destination** toggle.
+8. Click **Save Changes**.
+9. Follow the steps in the Destinations Actions documentation to [customize mappings](/docs/connections/destinations/actions/#customize-mappings), or follow the steps below to [sync an Engage audience](#sync-an-engage-audience).
+10. Enable the destination and click **Save Changes**.
+
+### Sync an Engage Audience
+
+To sync an Engage audience with StackAdapt:
+
+1. Make sure the Engage audience only contains profiles with a valid email address. Profiles missing an email address aren't valid on StackAdapt's platform.
+2. Add a condition to the Engage audience to ensure the required email trait is included.
+3. Open the previously created StackAdapt Audience destination.
+4. On the Mappings tab, click **New Mapping** and select **Forward Audience Event**.
+5. Under Define event trigger, click **Add Condition** and add this condition: Event Type is `Track` or `Identify`.
+6. Under **Map fields**, select the advertiser you want to sync the audience with. You can identify a specific advertiser by finding its ID in StackAdapt.
+ 
+
+On the StackAdapt platform:
+
+To verify that your audience is syncing with StackAdapt, open StackAdapt and navigate to **Audience & Attribution** > **Customer Data** > **Profiles**. On the Profiles tab, you should see a list of profiles being synced to the StackAdapt platform.
+
+> info "Syncs can take up to 4 hours"
+> It can take up to 4 hours from the time you initiate a sync for profiles to show up in StackAdapt.
+
+If you want to create a StackAdapt Audience from your Twilio Engage Audience:
+
+1. Open StackAdapt and navigate to **Audience & Attribution** > **Customer Data** > **Segments**, then click **Create Segment**.
+2. Choose **Segment Engage Audience ID** or **Segment Engage Audience Name** as the rule.
+3. Select the value for the corresponding filter.
+4. Click **Submit** to create the segment.
+
+### Sending an Audience to StackAdapt
+
+1. In Segment, go to Engage > Audiences and select the audience to sync with StackAdapt.
+2. Click **Add Destination** and select **StackAdapt Audience**.
+3. Toggle **Send Track** and **Send Identify** on.
+4. Click **Save**.
+
+## Data and privacy
+
+Review [StackAdapt's Data Processing Agreement](https://www.stackadapt.com/data-processing-agreement){:target="_blank"} to learn more about StackAdapt's privacy and data terms.
\ No newline at end of file
diff --git a/src/connections/destinations/catalog/actions-stackadapt-cloud/index.md b/src/connections/destinations/catalog/actions-stackadapt-cloud/index.md
index 66018f714a..500d8daa10 100644
--- a/src/connections/destinations/catalog/actions-stackadapt-cloud/index.md
+++ b/src/connections/destinations/catalog/actions-stackadapt-cloud/index.md
@@ -3,15 +3,14 @@ title: StackAdapt Destination
hide-boilerplate: true
hide-dossier: true
id: 61d8859be4f795335d5c677c
-redirect_from: '/connections/destinations/catalog/actions-stackadapt/'
+redirect_from: "/connections/destinations/catalog/actions-stackadapt/"
---
{% include content/plan-grid.md name="actions" %}
-By setting up StackAdapt as a Segment destination, your Segment events will be forwarded to [StackAdapt](https://www.stackadapt.com/){:target="_blank"}. This allows you to generate retargeting and lookalike audiences, track conversions, and measure return on ad spend using your Segment events - bypassing the need to install the StackAdapt pixel on your website and write code to send events to StackAdapt.
-
-This destination is maintained by StackAdapt. For any issues with the destination, please [submit a ticket to StackAdapt's support team](https://support.stackadapt.com/hc/en-us/requests/new?ticket_form_id=360006572593){:target="_blank"}.
+By setting up StackAdapt as a Segment destination, your Segment events will be forwarded to [StackAdapt](https://www.stackadapt.com/){:target="\_blank"}. This allows you to generate retargeting and lookalike audiences, track conversions, and measure return on ad spend using your Segment events - bypassing the need to install the StackAdapt pixel on your website and write code to send events to StackAdapt.
+
+This destination is maintained by StackAdapt. For any issues with the destination, please [submit a ticket to StackAdapt's support team](https://support.stackadapt.com/hc/en-us/requests/new?ticket_form_id=360006572593){:target="\_blank"}.
## Getting started
@@ -20,7 +19,7 @@ This destination is maintained by StackAdapt. For any issues with the destinatio
1. Log in to your StackAdapt account and navigate to the Pixels page.
2. Above the list of pixels, click **Install StackAdapt Pixel**.
- 
+ 
3. In the instructions that appear, copy the universal pixel ID from the code snippet. Below is an example of a code snippet where the universal pixel ID is `sqQHa3Ob1hFi__2EcYYVZg1`.
@@ -41,9 +40,9 @@ This destination is maintained by StackAdapt. For any issues with the destinatio
Segment events that are forwarded to StackAdapt can be used to track ad conversions, and to generate retargeting and lookalike audiences. Please review the StackAdapt documentation for the general setup of these if you are not already familiar:
-- [Creating Conversion Events](https://support.stackadapt.com/hc/en-us/articles/360005859214-Creating-Conversion-Events){:target="_blank"}
-- [Creating Retargeting Audiences](https://support.stackadapt.com/hc/en-us/articles/360005939153-Creating-Retargeting-Audiences){:target="_blank"}
-- [How to Generate and Target a Lookalike Audience](https://support.stackadapt.com/hc/en-us/articles/360023738733-How-to-Generate-and-Target-a-Lookalike-Audience){:target="_blank"}
+- [Creating Conversion Events](https://support.stackadapt.com/hc/en-us/articles/360005859214-Creating-Conversion-Events){:target="\_blank"}
+- [Creating Retargeting Audiences](https://support.stackadapt.com/hc/en-us/articles/360005939153-Creating-Retargeting-Audiences){:target="\_blank"}
+- [How to Generate and Target a Lookalike Audience](https://support.stackadapt.com/hc/en-us/articles/360023738733-How-to-Generate-and-Target-a-Lookalike-Audience){:target="\_blank"}
Setup of conversion events, retargeting audiences, and lookalike audiences that fire on Segment events is largely the same as the setup in the StackAdapt documentation, with a few caveats:
@@ -64,7 +63,7 @@ A Segment event fired with the code `analytics.track("User Registered")` can be
The StackAdapt destination also supports forwarding ecommerce fields for the purpose of creating event rules that match ecommerce events, with default mappings to properties specified in the [Segment V2 Ecommerce Event Spec](/docs/connections/spec/ecommerce/v2/) as described in the below table:
| Segment Ecommerce Event Property | StackAdapt Event Key |
-|----------------------------------|----------------------|
+| -------------------------------- | -------------------- |
| `order_id` | `order_id` |
| `revenue` | `revenue` |
| `product_id` | `product_id` |
@@ -76,7 +75,7 @@ The StackAdapt destination also supports forwarding ecommerce fields for the pur
For events that can involve multiple products, such as checkout events, StackAdapt forwards a JSON array of product objects with a `products` key and fields that map by default to following Segment product array fields:
| Segment Ecommerce Event Property | StackAdapt Product Object Key |
-|----------------------------------|-------------------------------|
+| -------------------------------- | ----------------------------- |
| `products.$.product_id` | `product_id` |
| `products.$.category` | `product_category` |
| `products.$.name` | `product_name` |
@@ -110,7 +109,7 @@ analytics.track('Order Completed', {
Although trait fields are not frequently used in event rules, the StackAdapt destination forwards them and they can be used if desired.
| Segment Trait Property | StackAdapt Event Key |
-|------------------------|----------------------|
+| ---------------------- | -------------------- |
| `traits.email` | `email` |
| `traits.first_name` | `first_name` |
| `traits.last_name` | `last_name` |
@@ -123,13 +122,13 @@ For example, to create a conversion event when a user with the domain `example.c
This rule would match a Segment event fired with code such as:
```javascript
-analytics.track('Order Completed', {
- order_id: '50314b8e9bcf000000000000',
+analytics.track("Order Completed", {
+ order_id: "50314b8e9bcf000000000000",
traits: {
- email: 'john.smith@example.com',
- first_name: 'John',
- last_name: 'Smith',
- phone: '+180055501000'
+ email: "john.smith@example.com",
+ first_name: "John",
+ last_name: "Smith",
+ phone: "+180055501000"
}
});
```
@@ -167,4 +166,4 @@ When forwarding past events using Reverse ETL, only users who have interacted wi
## Data and privacy
-Review [StackAdapt's Data Processing Agreement](https://www.stackadapt.com/data-processing-agreement){:target="_blank"} to learn more about StackAdapt's privacy and data terms.
+Review [StackAdapt's Data Processing Agreement](https://www.stackadapt.com/data-processing-agreement){:target="\_blank"} to learn more about StackAdapt's privacy and data terms.
diff --git a/src/connections/destinations/catalog/actions-the-trade-desk-crm/index.md b/src/connections/destinations/catalog/actions-the-trade-desk-crm/index.md
index 8fc81951b8..5012051d91 100644
--- a/src/connections/destinations/catalog/actions-the-trade-desk-crm/index.md
+++ b/src/connections/destinations/catalog/actions-the-trade-desk-crm/index.md
@@ -45,12 +45,15 @@ Setup is now complete, and the audience starts syncing to The Trade Desk.
To sync additional Audiences from your Engage space, create a separate instance of The Trade Desk CRM Destination.
+> info "Mapping tester availability"
+> The Mapping Tester isn't available for this destination. Since this destination requires batched events for activation, testing can only be performed end-to-end with a connected source.
+
{% include components/actions-fields.html settings="true"%}
## Limitations
-* An audience must have at least 1500 unique members; otherwise, the destination fails, and the data won't sync.
+* An audience must have at least 1500 unique members; otherwise, the destination fails, and the data won't sync.
* Audience attempts to sync once per day.
* Audience sync is a full sync.
diff --git a/src/connections/destinations/catalog/aws-s3/images/aws-s3-catalog.png b/src/connections/destinations/catalog/aws-s3/images/aws-s3-catalog.png
new file mode 100644
index 0000000000..05ade08e14
Binary files /dev/null and b/src/connections/destinations/catalog/aws-s3/images/aws-s3-catalog.png differ
diff --git a/src/connections/destinations/catalog/criteo-offline-conversions/index.md b/src/connections/destinations/catalog/criteo-offline-conversions/index.md
index 5d911ae00a..4878c14c60 100644
--- a/src/connections/destinations/catalog/criteo-offline-conversions/index.md
+++ b/src/connections/destinations/catalog/criteo-offline-conversions/index.md
@@ -3,6 +3,7 @@ title: Criteo Offline Conversions Destination
rewrite: true
hide-personas-partial: true
id: 5d433ab511dfe7000134faca
+hidden: true
---
[Criteo Offline Conversions](https://www.criteo.com/?utm_source=segmentio&utm_medium=docs&utm_campaign=partners){:target="_blank”} enables offline event tracking so marketers can run Omnichannel Campaigns by leveraging deterministic matching of SKU-level offline sales data with online user profiles. Criteo can predict which store the shopper prefers to visit and deliver personalized recommendations for products that entice them to visit and purchase.
diff --git a/src/connections/sources/catalog/libraries/website/javascript/custom-proxy.md b/src/connections/sources/catalog/libraries/website/javascript/custom-proxy.md
index 7bd144d8a2..a553d45a61 100644
--- a/src/connections/sources/catalog/libraries/website/javascript/custom-proxy.md
+++ b/src/connections/sources/catalog/libraries/website/javascript/custom-proxy.md
@@ -38,6 +38,9 @@ You need to set up two important parts, regardless of the CDN provider you use:
> info ""
> Segment only has the ability to enable the proxy setting for the Web (Analytics.js) source. Details for mobile source proxies are in the [Analytics-iOS](/docs/connections/sources/catalog/libraries/mobile/ios/#proxy-https-calls) and [Analytics-Android](/docs/connections/sources/catalog/libraries/mobile/android/#proxying-http-calls) documentation. It is not currently possible to set up a proxy for server sources using the Segment UI.
+> info "Segment loads most integrations through the proxy, except for third-party SDKs"
+> Third-party SDKs are loaded from the partner's CDN, even with a Segment proxy configured. For example, if you have a Segment custom proxy enabled and send data to a FullStory destination, FullStory's CDN still loads the FullStory SDK.
+
## Custom Proxy setup
There are two options you can choose from when you set up your custom domain proxy.
@@ -63,6 +66,8 @@ A Segment Customer Success team member will respond that they have enabled this
> info ""
> The **Host Address** field does not appear in source settings until it's enabled by Segment Customer Success.
+There should be no downtime once the setup is complete, because the default Segment domains continue to work alongside your custom domains.
+
## Custom CDN / API Proxy
diff --git a/src/connections/sources/catalog/libraries/website/javascript/index.md b/src/connections/sources/catalog/libraries/website/javascript/index.md
index 1dc5df0565..a247af0176 100644
--- a/src/connections/sources/catalog/libraries/website/javascript/index.md
+++ b/src/connections/sources/catalog/libraries/website/javascript/index.md
@@ -823,18 +823,18 @@ Because Segment tracks across subdomains, you can either use the same Segment so
UTM parameters are only used when linking to your site from outside your domain. When a visitor arrives using a link containing UTM parameters, Segment's analytics.js library will parse the URL query string and add the information to the event payload. For more information about UTM tracking, see the [Tracking Customers Across Channels and Devices](/docs/guides/how-to-guides/cross-channel-tracking/) documentation.
-UTM parameters contain three essential components (utm_source, utm_medium, utm_campaign) and two optional (utm_content, utm_term). For example, if you include the following three parameters in your URL: ?utm_source=mysource&utm_medium=email&utm_campaign=mytestcampaign, once a visitor arrives using a link containing the above, Segment automatically grabs the UTM parameters and subsequent events will contain these parameters within the 'context' object (visible in the raw view of your Source Debugger.)
+UTM parameters contain three essential components (utm_source, utm_medium, utm_campaign) and two optional ones (utm_content, utm_term). For example, if you include the following three parameters in your URL: `?utm_source=mysource&utm_medium=email&utm_campaign=mytestcampaign`, then when a visitor arrives through a link containing them, Segment automatically grabs the UTM parameters, and subsequent events contain these parameters within the `context` object (visible in the raw view of your Source Debugger).
So, for example, if somebody follows the link with above query string to your site, the subsequent 'page' call in your Debugger should contain the below and will be passed to any enabled destinations:
-
+```js
"context": {
"campaign": {
"medium": "email",
"name": "mytestcampaign",
"source": "mysource",
},
-
+```
Whenever the UTM parameters are no longer a part of the URL, Segment no longer includes them. For example, if the user goes to a new page within your website which does not contain these parameters, they will not be included in subsequent events. UTM parameters are non-persistent by default as they could potentially cause data accuracy problems. Here's an example of why: Say a user clicks on an ad and lands on your site. He navigates around and bookmarks an internal page - or maybe shares a link with a friend, who shares it with another friend. All those links would then point back to the same test utm_source as the initial referrer for any purchase.
diff --git a/src/connections/storage/catalog/bigquery/index.md b/src/connections/storage/catalog/bigquery/index.md
index 899bb27aef..84ff49f81c 100644
--- a/src/connections/storage/catalog/bigquery/index.md
+++ b/src/connections/storage/catalog/bigquery/index.md
@@ -36,7 +36,7 @@ To create a project and enable BigQuery:
### Create a service account for Segment
To create a service account for Segment:
-1. From the Navigation panel on the left, select **IAM & admin** > **Service accounts**.
+1. Open the Google Developer Console, select the Navigation panel and navigate to **IAM & admin** > **Service accounts**.
2. Click **Create Service Account**.
3. Enter a name for the service account (for example, `segment-warehouses`) and click **Create**.
4. Assign the service account the following roles:
diff --git a/src/engage/journeys/build-journey.md b/src/engage/journeys/build-journey.md
index 1f78d07ff5..d9973ff84c 100644
--- a/src/engage/journeys/build-journey.md
+++ b/src/engage/journeys/build-journey.md
@@ -144,7 +144,7 @@ To let users re-enter a Journey they've exited, you'll need to enable two Journe
Journeys exits users based off of the exit time you configure. Users can re-enter the Journey once they meet the Journey's entry condition again and your defined re-entry time has passed. You can configure re-entry time by hour, day, or week. Re-entry time begins once a user exits the Journey.
-Suppose, for example, you enable re-entry for an abandoned cart campaign. You set exit to seven days and re-entry to 30 days. A user who abandons their cart will progress through the Journey and exit no later than seven days after entering. Once 30 days after exit have passed, the user can re-enter the Journey.
+Suppose, for example, you enable re-entry for an abandoned cart campaign. You set exit to seven days and re-entry to 30 days. A user who abandons their cart will progress through the journey and exit no later than seven days after entering. Once 30 days have passed after exit, the user immediately re-enters the journey if they still satisfy the journey's entry condition.
> info "Ad-based exit settings"
> Exit settings you configure for the [Show an ad step](/docs/engage/journeys/step-types/#show-an-ad) don't impact other Journey steps. Users can exit an ad step but remain in the Journey.
diff --git a/src/engage/journeys/journey-context.md b/src/engage/journeys/journey-context.md
new file mode 100644
index 0000000000..5798c6d3ed
--- /dev/null
+++ b/src/engage/journeys/journey-context.md
@@ -0,0 +1,146 @@
+---
+title: Journey Context
+plan: engage-foundations
+hidden: true
+---
+
+[Event-Triggered Journeys](/docs/engage/journeys/event-triggered-journeys/) redefine how you orchestrate and personalize customer experiences.
+
+This page explains Journey context, which can help you dynamically adapt each journey to individual user interactions, creating highly relevant, real-time workflows.
+
+> info "Private Beta"
+> Event-Triggered Journeys is in private beta, and Segment is actively working on this feature. Some functionality may change before it becomes generally available. During private beta, Event-Triggered Journeys is not HIPAA eligible.
+
+## Overview
+
+Unlike traditional audience-based journeys, which rely solely on user progress through predefined steps, event-triggered journeys capture and store the details of user-triggered events. This shift allows you to access the data that caused users to reach a specific step and use it to make more precise decisions throughout the journey.
+
+With journey context, you can:
+
+- Split journeys based on event attributes or outcomes.
+- Personalize customer experiences using real-time event data.
+- Enable advanced use cases like abandonment recovery, dynamic delays, and more.
+
+## What is Journey context?
+
+Journey context is a flexible data structure that captures key details about the events and conditions that shape a customer’s journey. Journey context provides a point-in-time snapshot of event properties, making accurate and reliable data available throughout the journey.
+
+Journey context stores:
+- **Event properties**: Information tied to specific user actions, like `Appointment ID` or `Order ID`.
+- **Split evaluations**: Results of branch decisions made during the journey, enabling future steps to reference these outcomes.
+
+Journey context doesn't store:
+- **Profile traits**, which may change over time.
+- **Audience memberships**, which can evolve dynamically.
+
+This focused approach ensures journey decisions are always based on static, reliable data points.
+
+### Examples of stored context
+
+Event properties are the foundation of Journey context. Examples of event properties include:
+
+- **Appointment Scheduled:**
+ - `Appointment ID`
+ - `Appointment Start Time`
+ - `Appointment End Time`
+ - `Assigned Provider Name`
+- **Order Completed:**
+ - `Cart ID`
+ - `Order ID`
+ - An array of cart contents
+
+Segment captures each event’s properties as a point-in-time snapshot when the event occurs, ensuring that the data remains consistent for use in personalization, branching, and other advanced workflow steps.
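+
+For example, a captured `Order Completed` event might be stored in journey context as a snapshot along the lines of the following sketch (the key and property names are illustrative, not a fixed schema):
+
+```json
+{
+  "order_completed": {
+    "cart_id": "cart_789",
+    "order_id": "order_456",
+    "cart_contents": [
+      { "product_id": "sku_123", "quantity": 2 },
+      { "product_id": "sku_987", "quantity": 1 }
+    ]
+  }
+}
+```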
+
+## Using Journey context in Event-Triggered Journeys
+
+Journey context provides the framework for capturing and referencing data about events and conditions within a journey. It allows Event-Triggered Journeys to dynamically respond to user behavior by making event-specific data available for decisions and actions at each step.
+
+This is useful for scenarios like:
+
+- **Abandonment recovery:** Checking whether a user completed a follow-up action, like a purchase.
+- **Customizing messages:** Using event properties to include relevant details in communications.
+- **Scheduling workflows:** Triggering actions based on contextual data, like the time of a scheduled appointment.
+
+By incorporating event-specific data at each step, journey context helps workflows remain relevant and adaptable to user actions.
+
+### Journey steps that use context
+
+Journey context gets referenced and updated at various steps in an event-triggered journey. Each step plays a specific role in adapting the journey to user behavior or conditions.
+
+#### Wait for event split
+
+This step checks whether a user performs a specific event within a given time window. If the event occurs, Segment adds its details to journey context for use in later steps.
+
+For example, a journey may wait to see if a `checkout_completed` event occurs within two hours of a user starting checkout. If the event happens, the workflow can proceed; otherwise, it may take an alternate path. The data captured includes event properties (like `Order ID`) and the results of the split evaluation.
+
+#### Context split
+
+This step evaluates conditions using data already stored in journey context. Based on the conditions, users are routed to different branches of the journey.
+
+For example, a user who triggers an event with a property like `order_value > 100` might be routed to one branch, while other users follow a different path. The split uses attributes from journey context, like event properties or prior split outcomes, to determine the appropriate branch.
+
+#### Profile data split
+
+This step evaluates user traits or audience memberships to determine branching. Segment doesn't store profile data in journey context, but profile-based splits complement the static data available in the journey.
+
+For example, users in a premium audience can be directed to a tailored experience, while others follow the standard flow. Segment stores the results of this split in journey context for reference in later steps.
+
+#### Contextual delay
+
+A contextual delay introduces a wait period based on time-related data in journey context. This keeps workflows aligned with real-world events.
+
+For example, a journey can wait until one hour before an `Appointment Start Time` to send a reminder email. The delay reads from journey context but doesn't add any new data to it.
+
+#### Function steps
+
+Function steps process data from journey context through custom logic. The output of the function gets written back to context for use in later steps.
+
+For example, a function might calculate a discount percentage based on an event property, then store that value in journey context for later use. The output gets scoped to a dedicated object (`function_output`) to keep the context structured and reliable.
+
+#### Send to destination
+
+The send to destination step allows journey context data to be included in payloads sent to external tools, like messaging platforms or analytics systems.
+
+For example, a payload sent to a messaging platform might include `Order ID` and `Cart Contents` to personalize the message. Users can select which parts of journey context to include in the payload.
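+
+For instance, a payload sent to a messaging destination might include only the context values you select, as in this sketch (the event name and field names are illustrative):
+
+```json
+{
+  "userId": "user_123",
+  "event": "Cart Abandonment Reminder",
+  "properties": {
+    "order_id": "order_456",
+    "cart_contents": [
+      { "product_id": "sku_123", "quantity": 2 }
+    ]
+  }
+}
+```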
+
+## Context structure
+
+The structure of journey context organizes event-specific data and makes it accessible throughout the journey workflow. By standardizing how data is stored, Segment makes it easier to reference, use, and send this information at different stages of a journey.
+
+Journey context is organized as a collection of key-value pairs, where each key represents a data point or category, and its value holds the associated data. This structure supports various types of information, like event properties, split outcomes, and function outputs.
+
+For example, when a user triggers an event like `Appointment Scheduled`, Segment stores its properties (like `Appointment ID`, `Appointment Start Time`) as key-value pairs. You can then reference these values in later journey steps or include them in external payloads.
+
+The following example shows how journey context might look during a workflow. In this case, the user scheduled an appointment, and the workflow added related event data to the context:
+
+```json
+{
+ "appointment_scheduled": {
+ "appointment_id": "12345",
+ "start_time": "2024-12-06T10:00:00Z",
+ "end_time": "2024-12-06T11:00:00Z",
+ "provider_name": "Dr. Smith"
+ },
+ "split_decision": {
+ "split_name": "appointment_type_split",
+ "branch_chosen": "existing_patient"
+ },
+ "function_output": {
+ "discount_percentage": 15
+ }
+}
+```
+
+This payload contains:
+
+- **Event properties**: Captured under the `appointment_scheduled` key.
+- **Split outcomes**: Documented in the `split_decision` object.
+- **Function results**: Stored in the `function_output` object for use in later steps.
+
+## Journey context and Event-Triggered Journeys
+
+Journey context underpins the flexibility and precision of Event-Triggered Journeys. By capturing key details about events and decisions as they happen, journey context lets workflows respond dynamically to user actions and conditions.
+
+Whether you're orchestrating real-time abandonment recovery, scheduling contextual delays, or personalizing messages with event-specific data, journey context provides the tools to make your workflows more relevant and effective.
+
+To learn more about how Event-Triggered Journeys work, visit the [Event-Triggered Journeys documentation](/docs/engage/journeys/event-triggered-journeys/).
\ No newline at end of file
diff --git a/src/guides/usage-and-billing/account-management.md b/src/guides/usage-and-billing/account-management.md
index 3ce1d2c280..c5ba2075e6 100644
--- a/src/guides/usage-and-billing/account-management.md
+++ b/src/guides/usage-and-billing/account-management.md
@@ -59,7 +59,7 @@ Though workspaces can't be merged, you can move an existing source to a single w
To move a source between workspaces, navigate to the source's **Settings** tab, then click **Transfer to Workspace**. Choose the workspace you're moving the source to, then click **Transfer Source**.
-When you transfer a source from one workspace to another, all of your connected destinations aren't transferred. You must manually reconnect these destinations and settings.
+When you transfer a source from one workspace to another, Segment migrates all connected non-storage destinations.
> info ""
> The person who transfers the source must be a [workspace owner](/docs/segment-app/iam/) for both the origin and recipient workspaces, otherwise the recipient workspace won't appear in the dropdown list.
diff --git a/src/privacy/account-deletion.md b/src/privacy/account-deletion.md
index 68cfafda75..58e2201adb 100644
--- a/src/privacy/account-deletion.md
+++ b/src/privacy/account-deletion.md
@@ -2,7 +2,7 @@
title: Account & Data Deletion
---
-Segment allows you to delete specific data relating to an individual end user, all data from associated with a source, or all data within your entire workspace.
+Segment allows you to delete specific data relating to an individual end user, all data associated with a source, all data related to a Unify space, or all data in your entire workspace.
## Delete individual user data
To delete the data for an individual user from you workspace, follow the instructions on the [User Deletion and Suppression](/docs/privacy/user-deletion-and-suppression) page.
@@ -18,6 +18,17 @@ To delete the data for an entire source, email the Customer Success team [(frien
> note "Deleting source data"
> When Segment deletes your data for a particular source, the deletion is not forwarded to sources or data storage providers associated with your account: your data is only removed from Segment's S3 archive buckets. To remove your data from external sources, reach out to the individual source about their deletion practices.
+## Delete the data from a Unify space
+
+Workspace Owners can delete a Unify space and all of its associated data by sending an email to the Customer Success team [(friends@segment.com)](mailto:friends@segment.com) to create a support ticket. In your email to Customer Success, include the following information:
+ - Workspace slug
+ - Unify space name
+
+Segment waits for 5 calendar days after your request before starting a space deletion. If you want to cancel your Unify space deletion request, email the Customer Success team [(friends@segment.com)](mailto:friends@segment.com) during the first 5 calendar days after your initial request.
+
+> info "Data removed during a Unify space deletion"
+> When you delete a Unify space, Segment removes all profiles, computed traits, audiences, journeys, and other settings related to the Unify space from internal Segment servers. Unify space deletion doesn't delete data from connected Twilio Engage destinations. To remove your data from external destinations, reach out to the individual destination about their deletion practices.
+
## Delete your workspace data
Workspace admins can delete all of the data associated with a workspace, including customer data.
@@ -37,7 +48,7 @@ Workspace admins can delete all of the data associated with a workspace, includi
After you delete your workspace or account, Segment removes all data associated with each workspace within 30 days in a process called a [complete data purge](#what-is-a-complete-data-purge). For a data purge status update, email the Customer Success team [(friends@segment.com)](mailto:friends@segment.com).
-If you do not delete your workspace after you stop using Segment, **your data remains in Segment's internal servers until you submit a written deletion request**.
+If you don't delete your workspace after you stop using Segment, **your data remains in Segment's internal servers until you submit a written deletion request**.
> warning "Purging data from workspaces deleted prior to March 31, 2022"
> If you deleted your workspace prior to March 31, 2022, and would like to have data associated with your workspace purged from Segment's S3 archive buckets, email the Customer Success team [(friends@segment.com)](mailto:friends@segment.com) to create a support ticket. In your email to Customer Success, include either the slug or the ID of the workspace you'd like to have purged from internal Segment servers.
diff --git a/src/privacy/consent-management/onetrust-wrapper.md b/src/privacy/consent-management/onetrust-wrapper.md
index 0e38a12629..6e1538deb6 100644
--- a/src/privacy/consent-management/onetrust-wrapper.md
+++ b/src/privacy/consent-management/onetrust-wrapper.md
@@ -3,12 +3,13 @@ title: Analytics.js OneTrust Wrapper
plan: consent-management
---
-This guide about Segment's Analytics.js OneTrust wrapper contains context about which configurations might cause data loss, steps you can take to remediate data loss, and configurations that minimize data loss.
+This guide to Segment's Analytics.js OneTrust wrapper contains context about which configurations might cause data loss, steps you can take to remediate data loss, configurations that minimize data loss, and an overview of expected wrapper behavior.
For questions about OneTrust Consent and Preference Management behavior, see the [OneTrust documentation](https://my.onetrust.com/s/topic/0TO3q000000kIWOGA2/universal-consent-preference-management?language=en_US){:target="_blank"}.
For questions about the Analytics.js OneTrust wrapper, see the [@segment/analytics-consent-wrapper-onetrust](https://github.com/segmentio/analytics-next/tree/master/packages/consent/consent-wrapper-onetrust){:target="_blank"} repository.
+
## OneTrust consent banner behavior
The OneTrust consent banner has three key UI configurations that control how the banner and consent preferences behave:
@@ -185,3 +186,18 @@ You might experience data loss if a user navigates away from a landing page befo
+
+
+## Expected wrapper behavior
+
+The following table explains how Segment's OneTrust wrapper works with different configurations of consent categories and destination behaviors.
+
+| Consent categories | Unmapped destinations | Mapped destinations | Wrapper behavior |
+| ------------------ | --------------------- | ------------------- | ---------------- |
+| All categories are disabled | No unmapped destinations <br>**or**<br> All unmapped destinations are disabled | Any configuration | No data flows to Segment |
+| All categories are disabled | At least 1 enabled destination is not mapped to a consent category | Any configuration | Data flows to Segment |
+| All categories are disabled | At least 1 enabled destination is not mapped to a consent category | Any configuration | Data flows to Segment |
+| All categories are disabled | S3 destination is unmapped | Any configuration | Data flows to Segment |
+| One or more categories are enabled | No unmapped destinations <br>**or**<br> All unmapped destinations are disabled | All destinations are disabled | No data flows to Segment |
+| One or more categories are enabled | No unmapped destinations <br>**or**<br> All unmapped destinations are disabled | One or more destinations are enabled | Data flows to Segment |
+| One or more categories are enabled | One or more destinations are enabled | All destinations are disabled | Data flows to Segment |
+| One or more categories are enabled | One or more destinations are enabled | One or more destinations are enabled | Data flows to Segment |
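+
+These behaviors assume Analytics.js is loaded through the OneTrust wrapper. The snippet below is a minimal sketch based on the [@segment/analytics-consent-wrapper-onetrust](https://github.com/segmentio/analytics-next/tree/master/packages/consent/consent-wrapper-onetrust){:target="_blank"} README; `<YOUR_WRITE_KEY>` is a placeholder for your source's write key.
+
+```js
+import { AnalyticsBrowser } from '@segment/analytics-next'
+import { withOneTrust } from '@segment/analytics-consent-wrapper-onetrust'
+
+export const analytics = new AnalyticsBrowser()
+
+// Wrapping the instance lets OneTrust consent categories determine which
+// mapped device-mode destinations load and adds consent context to events.
+withOneTrust(analytics).load({ writeKey: '<YOUR_WRITE_KEY>' })
+```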
\ No newline at end of file
diff --git a/src/unify/data-graph/index.md b/src/unify/data-graph/index.md
index 8cf518a981..8e8195d787 100644
--- a/src/unify/data-graph/index.md
+++ b/src/unify/data-graph/index.md
@@ -29,8 +29,8 @@ To use the Data Graph, you'll need the following:
> Data Graph currently only supports workspaces in the United States.
To get started with the Data Graph, set up the required permissions in your warehouse. Segment supports the following:
-- Linked Audiences: [Snowflake](/docs/unify/data-graph/setup-guides/snowflake-setup/) and [Databricks](/docs/unify/data-graph/setup-guides/databricks-setup/)
-- Linked Events: [Snowflake](/docs/unify/data-graph/setup-guides/snowflake-setup/), [Databricks](/docs/unify/data-graph/setup-guides/databricks-setup/), [BigQuery](/docs/unify/data-graph/setup-guides/BigQuery-setup/), and [Redshift](/docs/unify/data-graph/setup-guides/redshift-setup/)
+- Linked Audiences: [BigQuery](/docs/unify/data-graph/setup-guides/BigQuery-setup/), [Databricks](/docs/unify/data-graph/setup-guides/databricks-setup/), and [Snowflake](/docs/unify/data-graph/setup-guides/snowflake-setup/)
+- Linked Events: [BigQuery](/docs/unify/data-graph/setup-guides/BigQuery-setup/), [Databricks](/docs/unify/data-graph/setup-guides/databricks-setup/), [Redshift](/docs/unify/data-graph/setup-guides/redshift-setup/), and [Snowflake](/docs/unify/data-graph/setup-guides/snowflake-setup/)
To track the data sent to Segment on previous syncs, Segment uses [Reverse ETL](/docs/connections/reverse-etl/) infrastructure to store diffs in tables within a dedicated schema called `_segment_reverse_etl` in your data warehouse. You can choose which database or project in your warehouse this data lives in.
diff --git a/src/unify/data-graph/setup-guides/BigQuery-setup.md b/src/unify/data-graph/setup-guides/BigQuery-setup.md
index 1ffc64f459..53a07c61a3 100644
--- a/src/unify/data-graph/setup-guides/BigQuery-setup.md
+++ b/src/unify/data-graph/setup-guides/BigQuery-setup.md
@@ -30,7 +30,19 @@ To set the roles and permissions:
11. Copy all the content in the JSON file you created in the previous step, and save it for Step 5.
-## Step 2: Grant read-only access for the Data Graph
+## Step 2: Create a dataset for Segment to store checkpoint tables
+Segment needs write access to a dataset for internal bookkeeping and to store checkpoint tables for the queries it runs, so create a dataset for Segment to use.
+
+Segment recommends creating a new dataset for the Data Graph. If you choose to use an existing dataset that has also been used for [Segment Reverse ETL](/docs/connections/reverse-etl/), you must follow the [additional instructions](/docs/unify/data-graph/setup-guides/bigquery-setup/#update-user-access-for-segment-reverse-etl-dataset) to update user access for the Segment Reverse ETL catalog.
+
+To create the dataset, navigate to the BigQuery SQL editor and run the following commands:
+
+```
+CREATE SCHEMA IF NOT EXISTS `__segment_reverse_etl`;
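+-- Grant the service account you created in Step 1 write access to the new dataset;
+-- add the service account email after "serviceAccount:" in the statement below.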
+GRANT `roles/bigquery.dataEditor` ON SCHEMA `__segment_reverse_etl` TO "serviceAccount:";
+```
+
+## Step 3: Grant read-only access for the Data Graph
Grant the [BigQuery Data Viewer](https://cloud.google.com/bigquery/docs/access-control#bigquery.dataViewer){:target="_blank"} role to the service account at the project level. Make sure to grant read-only access to the Profiles Sync project in case you have a separate project.
To grant read-only access for the Data Graph:
@@ -41,7 +53,7 @@ To grant read-only access for the Data Graph:
5. Select the **BigQuery Data Viewer role**.
6. Click **Save**.
-## *(Optional)* Step 3: Restrict read-only access
+## *(Optional)* Step 4: Restrict read-only access
If you want to restrict access to specific datasets, grant the BigQuery Data Viewer role on datasets to the service account. Make sure to grant read-only access to the Profiles Sync dataset.
To restrict read-only access:
@@ -58,7 +70,7 @@ You can also run the following command:
GRANT `roles/bigquery.dataViewer` ON SCHEMA `YOUR_DATASET_NAME` TO "serviceAccount:";
```
-## Step 4: Validate permissions
+## Step 5: Validate permissions
1. Navigate to **IAM & Admin > Service Accounts** in BigQuery.
2. Search for the service account you’ve just created.
3. From your service account, click the three dots under **Actions** and select **Manage permissions**.
@@ -66,7 +78,7 @@ GRANT `roles/bigquery.dataViewer` ON SCHEMA `YOUR_DATASET_NAME` TO "serviceAccou
5. Select a box with List resources within resource(s) matching your query.
6. Click **Analyze**, then click **Run query**.
-## Step 5: Connect your warehouse to Segment
+## Step 6: Connect your warehouse to Segment
1. Navigate to **Unify > Data Graph** in Segment. This should be a Unify space with Profiles Sync already set up.
2. Click **Connect warehouse**.
3. Select *BigQuery* as your warehouse type.