diff --git a/.github/scripts/versions.json b/.github/scripts/versions.json
index c24f756d41d..18ae0c6de1c 100644
--- a/.github/scripts/versions.json
+++ b/.github/scripts/versions.json
@@ -48,6 +48,13 @@
"Latest Version rolled out": "Not versioned (Braze maintains backward compatibility without versioning)",
"Sunset Date Current Version used in backend": "Not Available"
},
+ {
+ "Destination": "Campaign Manager",
+ "Version currently referred in CloudMode Services": "/dfareporting/v5/userprofiles/:profileId/conversions/batchinsert, /dfareporting/v5/userprofiles/:profileId/conversions/batchupdate",
+ "Link to check Versions documentation": "https://developers.google.com/doubleclick-advertisers/rest/v5/ https://developers.google.com/doubleclick-advertisers/deprecation",
+ "Latest Version rolled out": "v5",
+ "Sunset Date Current Version used in backend": "v4 is deprecated and sunsets on Feb 26, 2026. v5 is current (no sunset announced)."
+ },
{
"Destination": "CleverTap",
"Version currently referred in CloudMode Services": "/1/upload, /1/delete/profiles.json",
@@ -64,10 +71,10 @@
},
{
"Destination": "Criteo Audience",
- "Version currently referred in CloudMode Services": "/2025-04/audiences/:audienceId/contactlist",
+ "Version currently referred in CloudMode Services": "/2025-10/audiences/:audienceId/contactlist",
"Link to check Versions documentation": "https://developers.criteo.com/marketing-solutions/docs/versioning-policy",
"Latest Version rolled out": "v2025-10 (supported until October 2026)",
- "Sunset Date Current Version used in backend": "April 2026 (12 months after release - version 2025-04 released April 2025)"
+ "Sunset Date Current Version used in backend": "October 2026 (12 months after release - version 2025-10 released October 2025)"
},
{
"Destination": "Customer IO",
@@ -112,25 +119,25 @@
"Sunset Date Current Version used in backend": "Not Available"
},
{
- "Destination": "GAEC",
- "Version currently referred in CloudMode Services": "/v19/customers/:customerId/uploadClickConversions, /v19/customers/:customerId/uploadCallConversions, /v19/customers/:customerId/googleAds:searchStream, /v19/customers/:customerId/offlineUserDataJobs",
- "Link to check Versions documentation": "https://developers.google.com/google-ads/api/docs/release-notes",
- "Latest Version rolled out": "v20",
- "Sunset Date Current Version used in backend": "v19 will be deprecated in February 2026"
+ "Destination": "GOOGLE_ADWORDS_ENHANCED_CONVERSIONS",
+ "Version currently referred in CloudMode Services": "/v22/customers/:customerId/uploadClickConversions, /v22/customers/:customerId/uploadCallConversions, /v22/customers/:customerId/googleAds:searchStream, /v22/customers/:customerId/offlineUserDataJobs (currently used: v22)",
+ "Link to check Versions documentation": "https://developers.google.com/google-ads/api/docs/sunset-dates https://developers.google.com/google-ads/api/docs/release-notes",
+ "Latest Version rolled out": "v23 (released Jan 28, 2026)",
+ "Sunset Date Current Version used in backend": "v22 sunset: October 2026 (tentative). Latest available: v23 (released Jan 28, 2026) with sunset February 2027."
},
{
"Destination": "GOOGLE_ADWORDS_OFFLINE_CONVERSIONS",
- "Version currently referred in CloudMode Services": "/v19/customers/:customerId/uploadClickConversions, /v19/customers/:customerId/uploadCallConversions, /v19/customers/:customerId/googleAds:searchStream, /v19/customers/:customerId/offlineUserDataJobs",
- "Link to check Versions documentation": "https://developers.google.com/google-ads/api/docs/sunset-dates",
- "Latest Version rolled out": "v20",
- "Sunset Date Current Version used in backend": "v19 will be deprecated in February 2026"
+ "Version currently referred in CloudMode Services": "/v22/customers/:customerId/uploadClickConversions, /v22/customers/:customerId/uploadCallConversions, /v22/customers/:customerId/googleAds:searchStream, /v22/customers/:customerId/offlineUserDataJobs (currently used: v22)",
+ "Link to check Versions documentation": "https://developers.google.com/google-ads/api/docs/sunset-dates https://developers.google.com/google-ads/api/docs/release-notes",
+ "Latest Version rolled out": "v23 (released Jan 28, 2026)",
+ "Sunset Date Current Version used in backend": "v22 sunset: October 2026 (tentative). Latest available: v23 (released Jan 28, 2026) with sunset February 2027."
},
{
"Destination": "GOOGLE_ADWORDS_REMARKETING_LISTS",
- "Version currently referred in CloudMode Services": "/v19/customers/:customerId/offlineUserDataJobs, /v19/customers/:customerId/offlineUserDataJobs/:jobId:create, /v19/customers/:customerId/offlineUserDataJobs/:jobId:addOperations",
- "Link to check Versions documentation": "https://developers.google.com/google-ads/api/docs/sunset-dates\nhttps://developers.google.com/google-ads/api/docs/release-notes",
- "Latest Version rolled out": "v20",
- "Sunset Date Current Version used in backend": "v17 @June 4, 2025 v18 September 2025, v19 will be deprecated in February 2026"
+ "Version currently referred in CloudMode Services": "/v22/customers/:customerId/offlineUserDataJobs, /v22/customers/:customerId/offlineUserDataJobs/:jobId:create, /v22/customers/:customerId/offlineUserDataJobs/:jobId:addOperations (currently used: v22)",
+ "Link to check Versions documentation": "https://developers.google.com/google-ads/api/docs/sunset-dates https://developers.google.com/google-ads/api/docs/release-notes",
+ "Latest Version rolled out": "v23 (released Jan 28, 2026)",
+ "Sunset Date Current Version used in backend": "v22 sunset: October 2026 (tentative). Latest available: v23 (released Jan 28, 2026) with sunset February 2027."
},
{
"Destination": "Google Analytics 4 (GA4)",
diff --git a/.github/workflows/allure-test-reporter.yml b/.github/workflows/allure-test-reporter.yml
deleted file mode 100644
index e227a3027d3..00000000000
--- a/.github/workflows/allure-test-reporter.yml
+++ /dev/null
@@ -1,119 +0,0 @@
-name: Allure Test Reporter
-
-on:
- pull_request:
- types:
- - opened
- - reopened
- - synchronize
-
-concurrency:
- group: ${{ github.workflow }}-${{ github.head_ref || github.sha }}
- cancel-in-progress: true
-
-permissions:
- id-token: write
- contents: write # Required for gh-pages deployment
-
-jobs:
- test_and_publish:
- runs-on: ubuntu-latest
-
- steps:
- - name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
- with:
- egress-policy: audit
-
- - name: Checkout
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
-
- - name: Setup Node
- uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
- with:
- node-version-file: '.nvmrc'
- cache: 'npm'
-
- - name: Install Dependencies
- run: npm ci && npm run prepare
-
- - name: Run Tests and Generate Report
- run: |
- npm run test:ts:silent
-
- - name: Install Allure
- run: npm install -g allure-commandline
-
- - name: Generate Allure Report
- run: |
- REPO_NAME=$(basename ${{ github.repository }})
- PR_NUMBER=${{ github.event.pull_request.number }}
- REPORT_FOLDER="${REPO_NAME}/${PR_NUMBER}"
- allure generate allure-results --clean -o "${REPORT_FOLDER}"
- echo "REPORT_FOLDER=${REPORT_FOLDER}" >> $GITHUB_ENV # Persist this variable
-
- - name: Checkout Reports Repository
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- with:
- repository: rudderlabs/test-reports
- token: ${{ secrets.PAT }}
- path: test-reports
-
- - name: Copy Allure Report to Reports Repository
- run: |
- mkdir -p "test-reports/$REPORT_FOLDER"
- cp -r $REPORT_FOLDER/* test-reports/$REPORT_FOLDER/
-
- - name: Cleanup Old Reports (Keep Only Last 50)
- run: |
- REPO_NAME=$(basename ${{ github.repository }})
- cd test-reports/${REPO_NAME}
- ls -t | tail -n +51 | xargs rm -rf || echo "No old reports to delete"
-
- - name: Commit and Push Report
- env:
- GITHUB_TOKEN: ${{ secrets.PAT }}
- run: |
- cd test-reports
- git config --global user.name "github-actions"
- git config --global user.email "github-actions@github.com"
-
- # Stash any unstaged changes before pulling
- git add .
- git stash || echo "No changes to stash"
-
- # Pull latest changes safely
- git pull --rebase origin main
-
- # Apply the stashed changes back
- git stash pop || echo "No stash to apply"
-
- # Commit new report if there are changes
- git add .
- git commit -m "chore: add allure report for $REPORT_FOLDER" || echo "No changes to commit"
-
- # Push changes with retry logic
- for i in {1..5}; do
- git push origin main && break || sleep 5
- git pull --rebase origin main
- done
-
- - name: Add Test Report Link as Comment on PR
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
- with:
- github-token: ${{ secrets.PAT }}
- script: |
- const { owner, repo } = context.repo;
- const prNumber = context.payload.pull_request.number;
-
- const reportFolder = process.env.REPORT_FOLDER; // Read from environment variable
- const commentBody = `Allure Test reports for this run are available at:
- - Allure Report: [View Report](https://rudderlabs.github.io/test-reports/${reportFolder}/index.html)`;
-
- // Comment on the pull request
- await github.rest.issues.createComment({
- owner,
- repo,
- issue_number: prNumber,
- body: commentBody
- });
diff --git a/.github/workflows/build-pr-artifacts.yml b/.github/workflows/build-pr-artifacts.yml
index 3036a4fc8a2..e62b82b3f16 100644
--- a/.github/workflows/build-pr-artifacts.yml
+++ b/.github/workflows/build-pr-artifacts.yml
@@ -7,10 +7,6 @@ on:
- reopened
- synchronize
-permissions:
- contents: read
- id-token: write # allows the JWT to be requested from GitHub's OIDC provider
-
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.sha }}
cancel-in-progress: true
@@ -19,6 +15,8 @@ jobs:
generate-tag-names:
runs-on: ubuntu-latest
name: Generate Tag Names
+ permissions:
+ contents: read # to checkout repository code (actions/checkout)
# Skip for the release pull requests as staging artifacts will be generated
if: startsWith(github.event.pull_request.head.ref, 'release/') != true && startsWith(github.event.pull_request.head.ref, 'hotfix-release/') != true && github.event.pull_request.head.ref != 'master'
outputs:
@@ -26,12 +24,12 @@ jobs:
tag_name_ut: ${{ steps.gen_tag_names.outputs.tag_name_ut }}
steps:
- name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
+ uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
- name: Checkout
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
fetch-depth: 1
@@ -40,15 +38,18 @@ jobs:
id: gen_tag_names
run: |
tag_name=$(echo ${{ github.head_ref }} | tr "/" .)
- echo "Tag Name: $tag_name"
- echo "tag_name=$tag_name" >> $GITHUB_OUTPUT
+ echo "Tag Name: branch-$tag_name"
+ echo "tag_name=branch-$tag_name" >> $GITHUB_OUTPUT
tag_name_ut="ut-$tag_name"
- echo "UT Tag Name: $tag_name_ut"
- echo "tag_name_ut=$tag_name_ut" >> $GITHUB_OUTPUT
+ echo "UT Tag Name: branch-$tag_name_ut"
+ echo "tag_name_ut=branch-$tag_name_ut" >> $GITHUB_OUTPUT
build-transformer-image:
name: Build Transformer Docker Image - PR
+ permissions:
+ id-token: write # to pass OIDC token to reusable workflow (build-push-docker-image.yml)
+ contents: read # to checkout repository code and call reusable workflow (actions/checkout)
# Skip for the release pull requests as staging artifacts will be generated
# Skip main to develop sync pull requests
if: startsWith(github.event.pull_request.head.ref, 'release/') != true && startsWith(github.event.pull_request.head.ref, 'hotfix-release/') != true && github.event.pull_request.head.ref != 'master'
@@ -61,6 +62,7 @@ jobs:
dockerfile: Dockerfile
load_target: development
push_target: production
+ create_latest_tag: false
secrets:
DOCKERHUB_PROD_TOKEN: ${{ secrets.DOCKERHUB_PROD_TOKEN }}
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -68,6 +70,9 @@ jobs:
build-user-transformer-image:
name: Build User Transformer Docker Image - PR
+ permissions:
+ id-token: write # to pass OIDC token to reusable workflow (build-push-docker-image.yml)
+ contents: read # to checkout repository code and call reusable workflow (actions/checkout)
# Skip for the release pull requests as staging artifacts will be generated
if: startsWith(github.event.pull_request.head.ref, 'release/') != true && startsWith(github.event.pull_request.head.ref, 'hotfix-release/') != true && github.event.pull_request.head.ref != 'master'
needs: [generate-tag-names]
@@ -79,6 +84,7 @@ jobs:
dockerfile: Dockerfile-ut-func
load_target: development
push_target: production
+ create_latest_tag: false
secrets:
DOCKERHUB_PROD_TOKEN: ${{ secrets.DOCKERHUB_PROD_TOKEN }}
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -86,10 +92,12 @@ jobs:
run-ingestion-service-test:
name: Run Ingestion Service Test
+ permissions:
+ contents: read # to call reusable workflow (ingestion-service-test.yml)
needs: [build-transformer-image, generate-tag-names]
uses: ./.github/workflows/ingestion-service-test.yml
with:
build_tag: rudderstack/develop-rudder-transformer:${{ needs.generate-tag-names.outputs.tag_name }}
secrets:
- PAT: ${{ secrets.PAT }}
DOCKERHUB_TOKEN: ${{ secrets.DOCKERHUB_TOKEN }}
+ RELEASE_PRIVATE_KEY: ${{ secrets.RELEASE_PRIVATE_KEY }}
diff --git a/.github/workflows/build-push-docker-image.yml b/.github/workflows/build-push-docker-image.yml
index ab20acd0c3b..9cb6b1cbc7a 100644
--- a/.github/workflows/build-push-docker-image.yml
+++ b/.github/workflows/build-push-docker-image.yml
@@ -3,13 +3,10 @@ name: Build Transformer Docker Image
on:
workflow_call:
inputs:
- build_tag:
+ image_repo:
required: true
type: string
- push_tags:
- required: true
- type: string
- img_tag:
+ image_tag:
required: true
type: string
dockerfile:
@@ -30,8 +27,9 @@ on:
type: boolean
default: false
description: if this option is true, we would skip tests while building docker image
- workflow_url:
- type: string
+ create_latest_tag:
+ required: true
+ type: boolean
secrets:
DOCKERHUB_TOKEN:
required: true
@@ -52,11 +50,13 @@ jobs:
check_actor:
runs-on: ubuntu-latest
name: Check if actor is dependabot
+ permissions:
+ contents: read # minimum required permission for job execution
outputs:
is_dependabot: ${{ steps.check.outputs.is_dependabot }}
steps:
- name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
+ uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
@@ -74,11 +74,13 @@ jobs:
get_sha:
runs-on: ubuntu-latest
name: Get SHA information
+ permissions:
+ contents: read # minimum required permission for job execution
outputs:
sha: ${{steps.getSHA.outputs.SHA}}
steps:
- name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
+ uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
@@ -96,16 +98,18 @@ jobs:
get_changed_files:
runs-on: ubuntu-latest
name: Get Changed files
+ permissions:
+ contents: read # to checkout repository code and list changed files (actions/checkout, Ana06/get-changed-files)
outputs:
should_execute_tests: ${{ steps.processing.outputs.should_execute_tests }}
steps:
- name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
+ uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
- name: Checkout
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
fetch-depth: 1
- id: files
@@ -127,18 +131,32 @@ jobs:
echo "Match Found: $found"
echo "::set-output name=should_execute_tests::$found"
- build-transformer-image-arm64:
- name: Build Transformer Docker Image ARM64
- runs-on: [self-hosted, Linux, ARM64, ubuntu-22]
+ build-images:
+ name: Build Docker Images
+ permissions:
+ id-token: write # for AWS OIDC authentication (aws-actions/configure-aws-credentials)
+ contents: read # to checkout repository code (actions/checkout)
needs: [check_actor, get_sha, get_changed_files]
+ strategy:
+ matrix:
+ build-config:
+ - os: [self-hosted, Linux, ARM64]
+ tags: ${{ format('{0}:{1}', inputs.image_repo, inputs.image_tag) }}-arm64
+ image_tag: ${{ inputs.image_tag }}-arm64
+ platform: linux/arm64
+ - os: [self-hosted, Linux, X64]
+ tags: ${{ format('{0}:{1}', inputs.image_repo, inputs.image_tag) }}-amd64
+ image_tag: ${{ inputs.image_tag }}-amd64
+ platform: linux/amd64
+ runs-on: ${{ matrix.build-config.os }}
steps:
- name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
+ uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
- name: Checkout
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
ref: ${{ needs.get_sha.outputs.sha }}
fetch-depth: 1
@@ -151,27 +169,21 @@ jobs:
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Setup Docker Buildx
- uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
+ uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
- # This step is needed to ensure we run tests on a locally built image.
- # The rudderlabs/build-scan-push-action does not respect the 'load' input parameter
- # because it's designed for building and pushing images, not loading them locally.
- - name: Build and load Docker Image
- id: build-and-push
- uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83
+ - name: Build Docker Image
+ uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
context: .
file: ${{ inputs.dockerfile }}
- load: true
- platforms: linux/arm64
- push: false
- tags: ${{ inputs.build_tag }}
target: ${{ inputs.load_target }}
+ load: true
+ tags: ${{ matrix.build-config.tags }}
- name: Run Tests
if: ${{ inputs.skip_tests != true || needs.get_changed_files.outputs.should_execute_tests == 'true' }}
env:
- BUILD_TAG: ${{ inputs.build_tag }}
+ BUILD_TAG: ${{ matrix.build-config.tags }}
run: |
echo "Running tests on Docker image: $BUILD_TAG"
echo "Running JavaScript tests..."
@@ -179,125 +191,121 @@ jobs:
echo "Running TypeScript tests..."
docker run --rm $BUILD_TAG npm run test:ts:ci
- - name: Build and Push Multi-platform Images
- uses: rudderlabs/build-scan-push-action@d4991410238cec1b416875f1eb0e7a4565ac29aa # v1.8.2
+ - name: Configure AWS Credentials
+ uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7 # v6.0.0
+ with:
+ aws-region: ${{ vars.AWS_ECR_REGION }}
+ role-to-assume: ${{ vars.AWS_ECR_IAM_ROLE_ARN }}
+ role-session-name: ${{ github.event.repository.name }}-build-push
+
+ - name: Login to ECR
+ id: login-ecr
+        uses: aws-actions/amazon-ecr-login@062b18b96a7aff071d4dc91bc00c4c1a7945b076 # v2.0.1
+
+ - name: Build and Push Multi-platform Images to ECR
+ uses: rudderlabs/build-scan-push-action@96d7bfca912dd2e8805dbb322dc2540504ecac1e # v2.1.0
if: ${{ needs.check_actor.outputs.is_dependabot == 'false' }}
with:
context: .
file: ${{ inputs.dockerfile }}
target: ${{ inputs.push_target }}
push: true
- tags: ${{ inputs.push_tags }}-arm64
- platforms: |
- linux/arm64
+ tags: ${{ format('{0}/{1}', steps.login-ecr.outputs.registry, matrix.build-config.tags) }}
+ platforms: ${{ matrix.build-config.platform }}
build-args: |
- version=${{ inputs.img_tag }}-arm64
+ version=${{ matrix.build-config.image_tag }}
GIT_COMMIT_SHA=${{ github.sha }}
- # cache-from: type=gha
- # cache-to: type=gha,mode=max
-
- build-transformer-image-amd64:
- name: Build Transformer Docker Image AMD64
- runs-on: [self-hosted, Linux, X64]
- needs: [check_actor, get_sha, get_changed_files]
- steps:
- - name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
- with:
- egress-policy: audit
-
- - name: Checkout
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- with:
- ref: ${{ needs.get_sha.outputs.sha }}
- fetch-depth: 1
- name: Login to DockerHub
- if: ${{ needs.check_actor.outputs.is_dependabot == 'false' }}
- uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+ uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with:
username: ${{ env.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- - name: Setup Docker Buildx
- uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
-
- # This step is needed to ensure we run tests on a locally built image.
- # The rudderlabs/build-scan-push-action does not respect the 'load' input parameter
- # because it's designed for building and pushing images, not loading them locally.
- - name: Build and load Docker Image
- id: build-and-push
- uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83
- with:
- context: .
- file: ${{ inputs.dockerfile }}
- load: true
- platforms: linux/amd64
- push: false
- tags: ${{ inputs.build_tag }}
- target: ${{ inputs.load_target }}
-
- - name: Run Tests
- if: ${{ inputs.skip_tests != true || needs.get_changed_files.outputs.should_execute_tests == 'true' }}
- env:
- BUILD_TAG: ${{ inputs.build_tag }}
- run: |
- echo "Running tests on Docker image: $BUILD_TAG"
- echo "Running JavaScript tests..."
- docker run --rm $BUILD_TAG npm run test:js:ci
- echo "Running TypeScript tests..."
- docker run --rm $BUILD_TAG npm run test:ts:ci
-
- - name: Build and Push Multi-platform Images
- uses: rudderlabs/build-scan-push-action@d4991410238cec1b416875f1eb0e7a4565ac29aa # v1.8.2
+ - name: Build and Push Multi-platform Images to DockerHub
+ uses: rudderlabs/build-scan-push-action@96d7bfca912dd2e8805dbb322dc2540504ecac1e # v2.1.0
if: ${{ needs.check_actor.outputs.is_dependabot == 'false' }}
with:
context: .
file: ${{ inputs.dockerfile }}
target: ${{ inputs.push_target }}
push: true
- tags: ${{ inputs.push_tags }}-amd64
- platforms: |
- linux/amd64
+ tags: ${{ matrix.build-config.tags }}
+ platforms: ${{ matrix.build-config.platform }}
build-args: |
- version=${{ inputs.img_tag }}-amd64
+ version=${{ matrix.build-config.image_tag }}
GIT_COMMIT_SHA=${{ github.sha }}
- # cache-from: type=gha
- # cache-to: type=gha,mode=max
- create-manifest:
- name: Create multi-arch manifest
+ create-manifest-ecr:
+ name: Create multi-arch manifest for ECR
runs-on: ubuntu-latest
- needs: [check_actor, build-transformer-image-amd64, build-transformer-image-arm64]
+ permissions:
+ id-token: write # for AWS OIDC authentication (aws-actions/configure-aws-credentials)
+ contents: read # minimum required permission for job execution
+ needs: [build-images, check_actor]
if: ${{ needs.check_actor.outputs.is_dependabot == 'false' }}
+ steps:
+ - name: Harden the runner (Audit all outbound calls)
+ uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
+ with:
+ egress-policy: audit
+
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
+ - name: Configure AWS Credentials
+ uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7 # v6.0.0
+ with:
+ aws-region: ${{ vars.AWS_ECR_REGION }}
+ role-to-assume: ${{ vars.AWS_ECR_IAM_ROLE_ARN }}
+ role-session-name: ${{ github.event.repository.name }}-manifest
+
+ - name: Login to ECR
+ id: login-ecr
+        uses: aws-actions/amazon-ecr-login@062b18b96a7aff071d4dc91bc00c4c1a7945b076 # v2.0.1
+
+ - name: Create multi-arch manifest for ECR
+ run: |
+ tag=${{ steps.login-ecr.outputs.registry }}/${{ inputs.image_repo }}:${{ inputs.image_tag }}
+ docker buildx imagetools create -t $tag $tag-amd64 $tag-arm64
+
+ - name: Create latest multi-arch manifest for ECR
+ # To be triggered only for release/hotfix PR merges
+ if: ${{ inputs.create_latest_tag == true }}
+ run: |
+ tag=${{ steps.login-ecr.outputs.registry }}/${{ inputs.image_repo }}:${{ inputs.image_tag }}
+ docker buildx imagetools create -t ${{ steps.login-ecr.outputs.registry }}/${{ inputs.image_repo }}:${{ inputs.build_type == 'ut' && 'ut-' || '' }}latest $tag-amd64 $tag-arm64
+
+ create-manifest-dockerhub:
+ name: Create multi-arch manifest for DockerHub
+ runs-on: ubuntu-latest
+ permissions:
+ contents: read # minimum required permission for job execution
+ needs: [build-images, check_actor]
+ if: ${{ needs.check_actor.outputs.is_dependabot == 'false' }}
steps:
- name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
+ uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
+
- name: Login to DockerHub
- uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+ uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0
with:
username: ${{ env.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- - name: Set up Docker Buildx
- uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
-
- name: Create multi-arch manifest
run: |
- docker buildx imagetools create -t ${{ inputs.push_tags }} ${{ inputs.push_tags }}-amd64 ${{ inputs.push_tags }}-arm64
+ tag=${{ inputs.image_repo }}:${{ inputs.image_tag }}
+ docker buildx imagetools create -t $tag $tag-amd64 $tag-arm64
- name: Create latest multi-arch manifest
- # To be triggered only for release/hotfix PR merges coming from `prepare-for-prod-dt-deploy.yaml`
- if: ${{ inputs.build_type == 'dt' }}
- run: |
- docker buildx imagetools create -t rudderstack/rudder-transformer:latest ${{ inputs.push_tags }}-amd64 ${{ inputs.push_tags }}-arm64
-
- - name: Create latest ut multi-arch manifest
- # To be triggered only for release/hotfix PR merges coming from `prepare-for-prod-ut-deploy.yaml`
- if: ${{ inputs.build_type == 'ut' }}
+ # To be triggered only for release/hotfix PR merges
+ if: ${{ inputs.create_latest_tag == true }}
run: |
- docker buildx imagetools create -t rudderstack/rudder-transformer:ut-latest ${{ inputs.push_tags }}-amd64 ${{ inputs.push_tags }}-arm64
+ tag=${{ inputs.image_repo }}:${{ inputs.image_tag }}
+ docker buildx imagetools create -t ${{ inputs.image_repo }}:${{ inputs.build_type == 'ut' && 'ut-' || '' }}latest $tag-amd64 $tag-arm64
diff --git a/.github/workflows/check-pr-title.yml b/.github/workflows/check-pr-title.yml
index 5b9f2013bc4..96795f9291e 100644
--- a/.github/workflows/check-pr-title.yml
+++ b/.github/workflows/check-pr-title.yml
@@ -8,9 +8,6 @@ on:
- reopened
- synchronize
-permissions:
- pull-requests: read
-
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.sha }}
cancel-in-progress: true
@@ -19,9 +16,11 @@ jobs:
check-pr-title:
name: Check PR Title
runs-on: ubuntu-latest
+ permissions:
+ pull-requests: read # to read PR title for validation (rudderlabs/github-action-check-pr-title)
steps:
- name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
+ uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
diff --git a/.github/workflows/commitlint.yml b/.github/workflows/commitlint.yml
index 7c0ceb37eea..7366a318284 100644
--- a/.github/workflows/commitlint.yml
+++ b/.github/workflows/commitlint.yml
@@ -2,9 +2,6 @@ name: Commitlint
on: [push]
-permissions:
- contents: read
-
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.sha }}
cancel-in-progress: true
@@ -12,19 +9,21 @@ concurrency:
jobs:
commitlint:
runs-on: ubuntu-latest
+ permissions:
+ contents: read # to checkout repository code with full history (actions/checkout)
steps:
- name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
+ uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
- name: Checkout
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
fetch-depth: 0 # Needed to get full commit history
- name: Setup Node
- uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
+ uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
with:
node-version-file: '.nvmrc'
cache: 'npm'
diff --git a/.github/workflows/create-hotfix-branch.yml b/.github/workflows/create-hotfix-branch.yml
index e595b224dc3..6aa45ec223f 100644
--- a/.github/workflows/create-hotfix-branch.yml
+++ b/.github/workflows/create-hotfix-branch.yml
@@ -7,33 +7,43 @@ on:
description: Hotfix branch name
required: true
-permissions:
- contents: write
-
jobs:
validate-actor:
+ permissions:
+ contents: read # to call reusable workflow (validate-actor.yml)
uses: ./.github/workflows/validate-actor.yml
with:
team_names: 'integrations,data-management'
secrets:
- PAT: ${{ secrets.PAT }}
+ RELEASE_PRIVATE_KEY: ${{ secrets.RELEASE_PRIVATE_KEY }}
create-branch:
name: Create New Hotfix Branch
runs-on: ubuntu-latest
needs: validate-actor
+ permissions:
+ contents: read # to read repo metadata; branch creation uses app token (create-github-app-token)
+
# Only allow these users to create new hotfix branch from 'main'
if: github.ref == 'refs/heads/main'
steps:
- name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
+ uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
+ - name: Generate GitHub App Token
+ id: generate-token
+ uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
+ with:
+ app-id: ${{ vars.RELEASE_APP_ID }}
+ private-key: ${{ secrets.RELEASE_PRIVATE_KEY }}
+ permission-contents: write # to create branches
+
- name: Create Branch
uses: peterjgrainger/action-create-branch@10c7d268152480ae859347db45dc69086cef1d9c
env:
- GITHUB_TOKEN: ${{ secrets.PAT }}
+ GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}
with:
branch: 'hotfix/${{ inputs.hotfix_name }}'
diff --git a/.github/workflows/draft-new-release.yml b/.github/workflows/draft-new-release.yml
index 3e8b209f642..d62e8fa1872 100644
--- a/.github/workflows/draft-new-release.yml
+++ b/.github/workflows/draft-new-release.yml
@@ -2,38 +2,49 @@ name: Draft New Release
on: workflow_dispatch
-permissions:
- contents: write
- pull-requests: write
-
jobs:
validate-actor:
+ permissions:
+ contents: read # to call reusable workflow (validate-actor.yml)
uses: ./.github/workflows/validate-actor.yml
with:
team_names: 'integrations,data-management'
secrets:
- PAT: ${{ secrets.PAT }}
+ RELEASE_PRIVATE_KEY: ${{ secrets.RELEASE_PRIVATE_KEY }}
draft-new-release:
name: Draft New Release
runs-on: ubuntu-latest
needs: validate-actor
+ permissions:
+ contents: read # to checkout repository code; writes use app token (create-github-app-token)
+
# Only allow release stakeholders to initiate releases
if: (github.ref == 'refs/heads/develop' || startsWith(github.ref, 'refs/heads/hotfix/'))
steps:
- name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
+ uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
+ - name: Generate GitHub App Token
+ id: generate-token
+ uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
+ with:
+ app-id: ${{ vars.RELEASE_APP_ID }}
+ private-key: ${{ secrets.RELEASE_PRIVATE_KEY }}
+ permission-contents: write # to create commits, tags, and push branches
+ permission-pull-requests: write # to create and update PRs
+
- name: Checkout
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
fetch-depth: 0
+ token: ${{ steps.generate-token.outputs.token }}
- name: Setup Node
- uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
+ uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
with:
node-version-file: '.nvmrc'
cache: 'npm'
@@ -44,13 +55,6 @@ jobs:
run: |
npm ci
- # In order to make a commit, we need to initialize a user.
- # You may choose to write something less generic here if you want, it doesn't matter functionality wise.
- - name: Initialize Mandatory Git Config
- run: |
- git config --global user.name "GitHub Actions"
- git config --global user.email "noreply@github.com"
-
# Calculate the next release version based on conventional semantic release
- name: Create Release Branch
id: create-release
@@ -87,13 +91,26 @@ jobs:
- name: Update Changelog & Bump Version
id: finish-release
+ env:
+ HUSKY: 0
run: |
echo "Current version: $CURRENT_VERSION_VALUE"
echo "New version: $NEW_VERSION_VALUE"
npx replace $CURRENT_VERSION_VALUE $NEW_VERSION_VALUE sonar-project.properties
- git add sonar-project.properties
- npm run release -- -a --skip.tag --no-verify
- git push
+ npm run release -- --skip.commit --skip.tag --no-verify
+
+ - name: Create verified commit via API
+ uses: ryancyq/github-signed-commit@e9f3b28c80da7be66d24b8f501a5abe82a6b855f # v1.2.0
+ env:
+ GH_TOKEN: ${{ steps.generate-token.outputs.token }}
+ with:
+ branch-name: ${{ steps.create-release.outputs.branch_name }}
+ commit-message: 'chore(release): v${{ steps.create-release.outputs.new_version }}'
+ files: |
+ CHANGELOG.md
+ package.json
+ package-lock.json
+ sonar-project.properties
- name: Create Pull Request
run: |
@@ -103,4 +120,4 @@ jobs:
--title "chore(release): pull ${{ steps.create-release.outputs.branch_name }} into main" \
--body ":crown: *An automated PR*"
env:
- GH_TOKEN: ${{ secrets.PAT }}
+ GH_TOKEN: ${{ steps.generate-token.outputs.token }}
diff --git a/.github/workflows/dt-test-and-report-code-coverage.yml b/.github/workflows/dt-test-and-report-code-coverage.yml
index b2ed6dd4e9a..11ece525b87 100644
--- a/.github/workflows/dt-test-and-report-code-coverage.yml
+++ b/.github/workflows/dt-test-and-report-code-coverage.yml
@@ -1,16 +1,12 @@
name: DT Tests
on:
- workflow_call:
- secrets:
- SONAR_TOKEN:
- required: true
-
pull_request:
types: ['opened', 'reopened', 'synchronize']
-permissions:
- contents: read
-
+ push:
+ branches:
+ - main
+ - develop
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.sha }}
cancel-in-progress: true
@@ -18,9 +14,11 @@ concurrency:
jobs:
get_workflow_url:
runs-on: ubuntu-latest
+ permissions:
+ contents: read # minimum required permission for job execution
steps:
- name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
+ uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
@@ -34,22 +32,24 @@ jobs:
coverage:
name: Code Coverage
runs-on: ubuntu-latest
+ permissions:
+ contents: read # to checkout repository code with full history (actions/checkout)
needs: [get_workflow_url]
outputs:
tests_run_outcome: ${{steps.run_tests.outcome}}
steps:
- name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
+ uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
- name: Checkout
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
fetch-depth: 0
- name: Setup Node
- uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
+ uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
with:
node-version-file: '.nvmrc'
cache: 'npm'
@@ -70,14 +70,14 @@ jobs:
npm run lint:fix
- name: Upload Coverage Reports to Codecov
- uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
+ uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
directory: ./reports/coverage
- name: Upload TS Coverage Reports to Codecov
- uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
+ uses: codecov/codecov-action@671740ac38dd9b0130fbe1cec585b89eea48d3de # v5.5.2
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
@@ -92,13 +92,15 @@ jobs:
- name: SonarCloud Scan
if: always()
- uses: SonarSource/sonarqube-scan-action@fd88b7d7ccbaefd23d8f36f73b59db7a3d246602 # v6.0.0
+ uses: SonarSource/sonarqube-scan-action@a31c9398be7ace6bbfaf30c0bd5d415f843d45e9 # v7.0.0
env:
- GITHUB_TOKEN: ${{ secrets.PAT }}
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
notify:
name: slack notification on failure
+ permissions:
+ contents: read # to call reusable workflow (slack-notify.yml)
needs: [get_workflow_url, coverage]
if: needs.coverage.outputs.tests_run_outcome == 'failure' || failure()
uses: ./.github/workflows/slack-notify.yml
diff --git a/.github/workflows/housekeeping.yml b/.github/workflows/housekeeping.yml
index 568bf184872..df232629117 100644
--- a/.github/workflows/housekeeping.yml
+++ b/.github/workflows/housekeeping.yml
@@ -6,27 +6,24 @@ on:
- cron: '0 1 * * *'
workflow_dispatch:
-permissions:
- contents: read
-
jobs:
prs:
name: Clean Up Stale PRs and Issues
runs-on: ubuntu-latest
permissions:
- pull-requests: write
- issues: write
+ pull-requests: write # to label and close stale PRs (actions/stale)
+ issues: write # to label and close stale issues (actions/stale)
steps:
- name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
+ uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
- - uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10.1.1
+ - uses: actions/stale@b5d41d4e1d5dceea10e7104786b73624c18a190f # v10.2.0
with:
- repo-token: ${{ secrets.PAT }}
+ repo-token: ${{ secrets.GITHUB_TOKEN }}
operations-per-run: 200
stale-pr-message: 'This PR is considered to be stale. It has been open for 20 days with no further activity thus it is going to be closed in 7 days. To avoid such a case please consider removing the stale label manually or add a comment to the PR.'
stale-issue-message: 'This issue is considered to be stale. It has been open for 30 days with no further activity thus it is going to be closed in 7 days. To avoid such a case please consider removing the stale label manually or add a comment to the issue.'
@@ -42,21 +39,21 @@ jobs:
runs-on: ubuntu-latest
permissions:
- contents: write
+ contents: write # to delete stale branches (beatlabs/delete-old-branches-action)
steps:
- name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
+ uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
- name: Checkout
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Delete Old Branches
uses: beatlabs/delete-old-branches-action@4eeeb8740ff8b3cb310296ddd6b43c3387734588 # v0.0.11
with:
- repo_token: ${{ secrets.PAT }}
+ repo_token: ${{ secrets.GITHUB_TOKEN }}
date: '3 months ago'
dry_run: false
delete_tags: false
diff --git a/.github/workflows/ingestion-service-test.yml b/.github/workflows/ingestion-service-test.yml
index be7327e335e..8eebe778c01 100644
--- a/.github/workflows/ingestion-service-test.yml
+++ b/.github/workflows/ingestion-service-test.yml
@@ -7,18 +7,18 @@ on:
required: true
type: string
secrets:
- PAT:
- required: true
DOCKERHUB_TOKEN:
required: true
-
-permissions:
- contents: read
+ RELEASE_PRIVATE_KEY:
+ required: true
jobs:
ingestion-service-test:
name: ingestion-service-test
runs-on: ubuntu-latest
+
+ permissions:
+ contents: read # to checkout repository code (actions/checkout)
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
TRANSFORMER_IMAGE_NAME_FOR_TEST: ${{ inputs.build_tag }}
@@ -26,17 +26,28 @@ jobs:
TRANSFORMER_DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
steps:
- name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
+ uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
+ - name: Generate GitHub App Token
+ id: generate-token
+ uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
+ with:
+ app-id: ${{ vars.RELEASE_APP_ID }}
+ private-key: ${{ secrets.RELEASE_PRIVATE_KEY }}
+ permission-contents: read # to read repository contents
+ repositories: rudder-ingestion-svc # to access cross-repo
+
- name: Checkout
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: 'Clone ingestion service'
+ env:
+ GH_TOKEN: ${{ steps.generate-token.outputs.token }}
run: |
- git clone https://${{secrets.PAT}}@github.com/rudderlabs/rudder-ingestion-svc.git
+ gh repo clone rudderlabs/rudder-ingestion-svc
- name: Set up Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
+ uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0
with:
go-version-file: 'rudder-ingestion-svc/go.mod'
- name: 'Update Go module to use current branch'
diff --git a/.github/workflows/integrations_version_audit.yml b/.github/workflows/integrations_version_audit.yml
index b6a27ef17c4..96981ba788b 100644
--- a/.github/workflows/integrations_version_audit.yml
+++ b/.github/workflows/integrations_version_audit.yml
@@ -2,25 +2,24 @@ name: Integration Version Audit
on:
schedule:
- - cron: '0 9 1 * *' # Run monthly on the 1st at 9 AM UTC
+ - cron: '0 9 1 */2 *' # Run every 2 months on the 1st at 9 AM UTC
workflow_dispatch: {}
-permissions:
- contents: read
-
jobs:
audit:
name: Run Integration Version Audit
runs-on: ubuntu-latest
+ permissions:
+ contents: read # to checkout repository code (actions/checkout)
steps:
- name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
+ uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
- name: Checkout
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Install Cursor CLI
run: |
diff --git a/.github/workflows/prepare-for-dev-deploy.yml b/.github/workflows/prepare-for-dev-deploy.yml
index 0da62679038..f5e603384de 100644
--- a/.github/workflows/prepare-for-dev-deploy.yml
+++ b/.github/workflows/prepare-for-dev-deploy.yml
@@ -15,29 +15,22 @@ concurrency:
cancel-in-progress: true
jobs:
- report-coverage:
- name: Report Code Coverage
- if: github.event_name == 'push'
- uses: ./.github/workflows/dt-test-and-report-code-coverage.yml
- secrets:
- SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
-
generate-tag-names:
runs-on: ubuntu-latest
name: Generate Tag Names
- # Only merged pull requests must trigger
- if: github.event.pull_request.merged == true
+ permissions:
+ contents: read # to checkout repository code (actions/checkout)
outputs:
tag_name: ${{ steps.gen_tag_names.outputs.tag_name }}
tag_name_ut: ${{ steps.gen_tag_names.outputs.tag_name_ut }}
steps:
- name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
+ uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
- name: Checkout
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
fetch-depth: 1
@@ -54,8 +47,9 @@ jobs:
build-transformer-image:
name: Build Transformer Docker Image - Dev
- # Only merged pull requests must trigger
- if: github.event.pull_request.merged == true
+ permissions:
+ id-token: write # to pass OIDC token to reusable workflow (build-push-docker-image.yml)
+ contents: read # to checkout repository code and call reusable workflow (actions/checkout)
needs: [generate-tag-names]
uses: ./.github/workflows/build-push-docker-image.yml
with:
@@ -66,6 +60,7 @@ jobs:
load_target: development
push_target: production
use_merge_sha: true
+ create_latest_tag: false
secrets:
DOCKERHUB_PROD_TOKEN: ${{ secrets.DOCKERHUB_PROD_TOKEN }}
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -73,8 +68,9 @@ jobs:
build-user-transformer-image:
name: Build User Transformer Docker Image - Dev
- # Only merged pull requests must trigger
- if: github.event.pull_request.merged == true
+ permissions:
+ id-token: write # to pass OIDC token to reusable workflow (build-push-docker-image.yml)
+ contents: read # to checkout repository code and call reusable workflow (actions/checkout)
needs: [generate-tag-names]
uses: ./.github/workflows/build-push-docker-image.yml
with:
@@ -84,6 +80,7 @@ jobs:
dockerfile: Dockerfile-ut-func
load_target: development
push_target: production
+ create_latest_tag: false
secrets:
DOCKERHUB_PROD_TOKEN: ${{ secrets.DOCKERHUB_PROD_TOKEN }}
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -92,15 +89,18 @@ jobs:
restart-k8s-deployment:
name: Restart K8s Deployment
runs-on: ubuntu-latest
+ permissions:
+ id-token: write # for AWS OIDC authentication (restart-deployment action uses aws-actions/configure-aws-credentials)
+ contents: read # to checkout repository code and local action (actions/checkout)
needs: [generate-tag-names, build-transformer-image]
steps:
- name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
+ uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
- name: Checkout
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Restart Deployment
# # This action restarts the deployment in the specified namespaces
@@ -115,15 +115,18 @@ jobs:
restart-k8s-deployment-dedicated:
name: Restart K8s Deployment
runs-on: ubuntu-latest
+ permissions:
+ id-token: write # for AWS OIDC authentication (restart-deployment action uses aws-actions/configure-aws-credentials)
+ contents: read # to checkout repository code and local action (actions/checkout)
needs: [generate-tag-names, build-transformer-image]
steps:
- name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
+ uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
- name: Checkout
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Restart Deployment
# # This action restarts the deployment in the specified namespaces
diff --git a/.github/workflows/prepare-for-prod-dt-deploy.yml b/.github/workflows/prepare-for-prod-dt-deploy.yml
index 28ec9d9f3b6..7c508c922d8 100644
--- a/.github/workflows/prepare-for-prod-dt-deploy.yml
+++ b/.github/workflows/prepare-for-prod-dt-deploy.yml
@@ -20,13 +20,6 @@ concurrency:
cancel-in-progress: true
jobs:
- report-coverage:
- name: Report Code Coverage
- if: github.event_name == 'push'
- uses: ./.github/workflows/dt-test-and-report-code-coverage.yml
- secrets:
- SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
-
generate-tag-names:
runs-on: ubuntu-latest
name: Generate Tag Names
@@ -37,12 +30,12 @@ jobs:
tag_name_ut: ${{ steps.gen_tag_names.outputs.tag_name_ut }}
steps:
- name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
+ uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
- name: Checkout
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
fetch-depth: 1
@@ -68,7 +61,8 @@ jobs:
push_target: production
build_type: dt
use_merge_sha: true
- skip_tests: ${{startsWith(github.event.pull_request.head.ref, 'hotfix-release/')}}
+ skip_tests: true
+ create_latest_tag: true
secrets:
DOCKERHUB_PROD_TOKEN: ${{ secrets.DOCKERHUB_PROD_TOKEN }}
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -78,17 +72,31 @@ jobs:
name: Update Helm Charts For Production and Create Pull Request
runs-on: ubuntu-latest
needs: [generate-tag-names, build-transformer-image]
+
+ permissions:
+ contents: read # to checkout repository code; cross-repo writes use app token (create-github-app-token)
+
env:
TAG_NAME: ${{ needs.generate-tag-names.outputs.tag_name }}
- TF_IMAGE_REPOSITORY: rudderstack/rudder-transformer
+ TF_IMAGE_REPOSITORY: ${{ vars.AWS_ECR_REGISTRY }}/rudderstack/rudder-transformer
steps:
- name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
+ uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
+ - name: Generate GitHub App Token
+ id: generate-token
+ uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
+ with:
+ app-id: ${{ vars.RELEASE_APP_ID }}
+ private-key: ${{ secrets.RELEASE_PRIVATE_KEY }}
+ permission-contents: write # to create commits and push branches
+ permission-pull-requests: write # to create and update PRs
+ repositories: rudder-devops,rudderstack-operator # to access cross-repo
+
- name: Checkout
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
fetch-depth: 1
@@ -104,8 +112,10 @@ jobs:
echo "Transformer: $TAG_NAME"
- name: Clone Devops Repo
+ env:
+ GH_TOKEN: ${{ steps.generate-token.outputs.token }}
run: |
- git clone https://${{secrets.PAT}}@github.com/rudderlabs/rudder-devops.git
+ gh repo clone rudderlabs/rudder-devops
- name: Extract branch name
id: extract_branch_name
@@ -117,67 +127,127 @@ jobs:
- name: Update Helm Charts and Raise Pull Request
env:
- GITHUB_TOKEN: ${{ secrets.PAT }}
+ GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}
run: |
cd rudder-devops
git checkout -b shared-transformer-$TAG_NAME
cd helm-charts/shared-services/per-az/environment/production
yq eval -i ".rudder-transformer.image.tag=\"$TAG_NAME\"" production.yaml
- yq eval -i ".rudder-transformer.image.repository=\"$TF_IMAGE_REPOSITORY\"" production.yaml
git add production.yaml
yq eval -i ".rudder-transformer.image.tag=\"$TAG_NAME\"" enterprise/enterprise.yaml
- yq eval -i ".rudder-transformer.image.repository=\"$TF_IMAGE_REPOSITORY\"" enterprise/enterprise.yaml
git add enterprise/enterprise.yaml
yq eval -i ".rudder-transformer.image.tag=\"$TAG_NAME\"" multi-tenant/multi-tenant.yaml
- yq eval -i ".rudder-transformer.image.repository=\"$TF_IMAGE_REPOSITORY\"" multi-tenant/multi-tenant.yaml
git add multi-tenant/multi-tenant.yaml
cd ../../../../config-be-rudder-transformer/environment/prod
yq eval -i ".config-be-rudder-transformer.image.tag=\"$TAG_NAME\"" base.yaml
git add base.yaml
- git commit -m "chore: upgrade shared transformers to $TAG_NAME"
+ - name: Push shared transformers branch to remote
+ env:
+ GH_TOKEN: ${{ steps.generate-token.outputs.token }}
+ run: |
+ cd rudder-devops
git push -u origin shared-transformer-$TAG_NAME
+ - name: Create verified commit for shared transformers
+ uses: ryancyq/github-signed-commit@e9f3b28c80da7be66d24b8f501a5abe82a6b855f # v1.2.0
+ env:
+ GH_TOKEN: ${{ steps.generate-token.outputs.token }}
+ with:
+ working-directory: rudder-devops
+ branch-name: shared-transformer-${{ env.TAG_NAME }}
+ commit-message: 'chore: upgrade shared transformers to ${{ env.TAG_NAME }}'
+ files: |
+ helm-charts/shared-services/per-az/environment/production/production.yaml
+ helm-charts/shared-services/per-az/environment/production/enterprise/enterprise.yaml
+ helm-charts/shared-services/per-az/environment/production/multi-tenant/multi-tenant.yaml
+ helm-charts/config-be-rudder-transformer/environment/prod/base.yaml
+
+ - name: Create PR for shared transformers
+ env:
+ GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}
+ run: |
+ cd rudder-devops
gh pr create --fill
- name: Update Helm Chart and Raise Pull Request For Hosted Transformer
env:
- GITHUB_TOKEN: ${{ secrets.PAT }}
+ GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}
run: |
cd rudder-devops
git checkout -b hosted-transformer-$TAG_NAME ${{steps.extract_branch_name.outputs.branch_name}}
cd customer-objects/multi-tenant-us
yq eval -i ".spec.transformer.image.version=\"$TAG_NAME\"" hostedmtedmt.yaml
- yq eval -i ".spec.transformer.image.repository=\"$TF_IMAGE_REPOSITORY\"" hostedmtedmt.yaml
git add hostedmtedmt.yaml
- git commit -m "chore: upgrade hosted transformer to $TAG_NAME"
+ - name: Push hosted transformer branch to remote
+ env:
+ GH_TOKEN: ${{ steps.generate-token.outputs.token }}
+ run: |
+ cd rudder-devops
git push -u origin hosted-transformer-$TAG_NAME
+ - name: Create verified commit for hosted transformer
+ uses: ryancyq/github-signed-commit@e9f3b28c80da7be66d24b8f501a5abe82a6b855f # v1.2.0
+ env:
+ GH_TOKEN: ${{ steps.generate-token.outputs.token }}
+ with:
+ working-directory: rudder-devops
+ branch-name: hosted-transformer-${{ env.TAG_NAME }}
+ commit-message: 'chore: upgrade hosted transformer to ${{ env.TAG_NAME }}'
+ files: |
+ customer-objects/multi-tenant-us/hostedmtedmt.yaml
+
+ - name: Create PR for hosted transformer
+ env:
+ GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}
+ run: |
+ cd rudder-devops
gh pr create --fill
- name: Clone Operator Repo
+ env:
+ GH_TOKEN: ${{ steps.generate-token.outputs.token }}
run: |
- git clone https://${{secrets.PAT}}@github.com/rudderlabs/rudderstack-operator.git
+ gh repo clone rudderlabs/rudderstack-operator
- name: Update helm charts and raise pull request for dedicated transformer from operator
env:
- GITHUB_TOKEN: ${{ secrets.PAT }}
+ GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}
run: |
cd rudderstack-operator
git checkout -b dedicated-transformer-$TAG_NAME ${{steps.extract_branch_name.outputs.branch_name}}
cd operator-helm/valuefiles
yq eval -i ".transformer.image.version=\"$TAG_NAME\"" values.yaml
- yq eval -i ".transformer.image.repository=\"$TF_IMAGE_REPOSITORY\"" values.yaml
git add values.yaml
- git commit -m "chore: upgrade dedicated transformer default version to $TAG_NAME"
+ - name: Push dedicated transformer branch to remote
+ env:
+ GH_TOKEN: ${{ steps.generate-token.outputs.token }}
+ run: |
+ cd rudderstack-operator
git push -u origin dedicated-transformer-$TAG_NAME
+ - name: Create verified commit for dedicated transformer
+ uses: ryancyq/github-signed-commit@e9f3b28c80da7be66d24b8f501a5abe82a6b855f # v1.2.0
+ env:
+ GH_TOKEN: ${{ steps.generate-token.outputs.token }}
+ with:
+ working-directory: rudderstack-operator
+ branch-name: dedicated-transformer-${{ env.TAG_NAME }}
+ commit-message: 'chore: upgrade dedicated transformer default version to ${{ env.TAG_NAME }}'
+ files: |
+ operator-helm/valuefiles/values.yaml
+
+ - name: Create PR for dedicated transformer
+ env:
+ GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}
+ run: |
+ cd rudderstack-operator
gh pr create --fill
diff --git a/.github/workflows/prepare-for-prod-rollback.yml b/.github/workflows/prepare-for-prod-rollback.yml
index 5c30cd2c6da..6425f1457ad 100644
--- a/.github/workflows/prepare-for-prod-rollback.yml
+++ b/.github/workflows/prepare-for-prod-rollback.yml
@@ -3,55 +3,65 @@ name: Prepare for Rollback in Production Environment
on:
workflow_dispatch:
-permissions:
- contents: write
- pull-requests: write
-
jobs:
validate-actor:
+ permissions:
+ contents: read # to call reusable workflow (validate-actor.yml)
uses: ./.github/workflows/validate-actor.yml
with:
team_names: 'integrations'
secrets:
- PAT: ${{ secrets.PAT }}
+ RELEASE_PRIVATE_KEY: ${{ secrets.RELEASE_PRIVATE_KEY }}
create-rollback-pr:
name: Update Helm Charts For Production and Create Pull Request
runs-on: ubuntu-latest
needs: validate-actor
+ permissions:
+ contents: read # to read repo metadata; cross-repo writes use app token (create-github-app-token)
+
# Only allow to be deployed from tags and main branch
if: (startsWith(github.ref, 'refs/tags/') || startsWith(github.ref, 'refs/heads/main'))
steps:
- name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
+ uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
+ - name: Generate GitHub App Token
+ id: generate-token
+ uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
+ with:
+ app-id: ${{ vars.RELEASE_APP_ID }}
+ private-key: ${{ secrets.RELEASE_PRIVATE_KEY }}
+ permission-contents: write # to create commits and push branches
+ permission-pull-requests: write # to create and update PRs
+ repositories: rudder-devops,rudderstack-operator # to access cross-repo
+
- name: Get Target Version
id: target-version
+ env:
+ REF_NAME: ${{ github.ref_name }}
run: |
- version=${{ github.ref_name }}
+ version="$REF_NAME"
echo "tag_name=$version" >> $GITHUB_OUTPUT
- echo "Target Version: $tag_name"
-
- # In order to make a commit, we need to initialize a user.
- # You may choose to write something less generic here if you want, it doesn't matter functionality wise.
- - name: Initialize Mandatory Git Config
- run: |
- git config --global user.name "GitHub Actions"
- git config --global user.email "noreply@github.com"
+ echo "Target Version: $version"
- name: Clone Devops Repo
+ env:
+ GH_TOKEN: ${{ steps.generate-token.outputs.token }}
run: |
- git clone https://${{secrets.PAT}}@github.com/rudderlabs/rudder-devops.git
+ gh repo clone rudderlabs/rudder-devops
- name: Update Helm Charts and Raise Pull Request
env:
- GITHUB_TOKEN: ${{ secrets.PAT }}
+ GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}
run: |
cd rudder-devops
+ git config user.name "GitHub Actions"
+ git config user.email "noreply@github.com"
git checkout -b shared-transformer-rollback-${{ steps.target-version.outputs.tag_name }}
cd helm-charts/shared-services/per-az
@@ -79,9 +89,11 @@ jobs:
- name: Update helm charts and raise pull request for enterprise customers on dedicated transformers
env:
- GITHUB_TOKEN: ${{ secrets.PAT }}
+ GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}
run: |
cd rudder-devops
+ git config user.name "GitHub Actions"
+ git config user.email "noreply@github.com"
git checkout -b dedicated-transformer-rollback-${{ steps.target-version.outputs.tag_name }}
cd customer-objects
@@ -116,14 +128,18 @@ jobs:
fi
- name: Clone Operator Repo
+ env:
+ GH_TOKEN: ${{ steps.generate-token.outputs.token }}
run: |
- git clone https://${{secrets.PAT}}@github.com/rudderlabs/rudderstack-operator.git
+ gh repo clone rudderlabs/rudderstack-operator
- name: Update helm charts and raise pull request for dedicated transformer from operator
env:
- GITHUB_TOKEN: ${{ secrets.PAT }}
+ GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}
run: |
cd rudderstack-operator
+ git config user.name "GitHub Actions"
+ git config user.email "noreply@github.com"
git checkout -b dedicated-transformer-rollback-${{ steps.target-version.outputs.tag_name }}
cd operator-helm/valuefiles
diff --git a/.github/workflows/prepare-for-prod-ut-deploy.yml b/.github/workflows/prepare-for-prod-ut-deploy.yml
index e8d3134a79d..720ee0fe08d 100644
--- a/.github/workflows/prepare-for-prod-ut-deploy.yml
+++ b/.github/workflows/prepare-for-prod-ut-deploy.yml
@@ -20,13 +20,6 @@ concurrency:
cancel-in-progress: true
jobs:
- report-coverage:
- name: Report Code Coverage
- if: github.event_name == 'push'
- uses: ./.github/workflows/dt-test-and-report-code-coverage.yml
- secrets:
- SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
-
generate-tag-names:
runs-on: ubuntu-latest
name: Generate Tag Names
@@ -37,12 +30,12 @@ jobs:
tag_name: ${{ steps.gen_tag_names.outputs.tag_name }}
steps:
- name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
+ uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
- name: Checkout
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
fetch-depth: 1
@@ -72,7 +65,8 @@ jobs:
push_target: production
build_type: ut
use_merge_sha: true
- skip_tests: ${{startsWith(github.event.pull_request.head.ref, 'hotfix-release/')}}
+ skip_tests: true
+ create_latest_tag: true
secrets:
DOCKERHUB_PROD_TOKEN: ${{ secrets.DOCKERHUB_PROD_TOKEN }}
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -82,35 +76,43 @@ jobs:
name: Update Helm Charts For Production and Create Pull Request
runs-on: ubuntu-latest
needs: [generate-tag-names, build-user-transformer-image]
+
+ permissions:
+ contents: read # to checkout repository code; cross-repo writes use app token (create-github-app-token)
+
env:
UT_TAG_NAME: ${{ needs.generate-tag-names.outputs.tag_name_ut }}
TAG_NAME: ${{ needs.generate-tag-names.outputs.tag_name }}
- TF_IMAGE_REPOSITORY: rudderstack/rudder-transformer
steps:
- name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
+ uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
+ - name: Generate GitHub App Token
+ id: generate-token
+ uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
+ with:
+ app-id: ${{ vars.RELEASE_APP_ID }}
+ private-key: ${{ secrets.RELEASE_PRIVATE_KEY }}
+ permission-contents: write # to create commits and push branches
+ permission-pull-requests: write # to create and update PRs
+ repositories: rudder-devops # to access cross-repo
+
- name: Checkout
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
fetch-depth: 1
- # In order to make a commit, we need to initialize a user.
- # You may choose to write something less generic here if you want, it doesn't matter functionality wise.
- - name: Initialize Mandatory Git Config
- run: |
- git config --global user.name "GitHub Actions"
- git config --global user.email "noreply@github.com"
-
- name: Print Docker Image Tags
run: |
echo "User Transformer: $UT_TAG_NAME"
- name: Clone Devops Repo
+ env:
+ GH_TOKEN: ${{ steps.generate-token.outputs.token }}
run: |
- git clone https://${{secrets.PAT}}@github.com/rudderlabs/rudder-devops.git
+ gh repo clone rudderlabs/rudder-devops
- name: Extract branch name
id: extract_branch_name
@@ -122,22 +124,21 @@ jobs:
- name: Update Helm Charts and Raise Pull Request
env:
- GITHUB_TOKEN: ${{ secrets.PAT }}
+ GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}
run: |
cd rudder-devops
+ git config user.name "GitHub Actions"
+ git config user.email "noreply@github.com"
git checkout -b shared-user-transformer-$UT_TAG_NAME
cd helm-charts/shared-services/per-az/environment/production
yq eval -i ".user-transformer.image.tag=\"$UT_TAG_NAME\"" production.yaml
- yq eval -i ".user-transformer.image.repository=\"$TF_IMAGE_REPOSITORY\"" production.yaml
git add production.yaml
yq eval -i ".user-transformer.image.tag=\"$UT_TAG_NAME\"" enterprise/enterprise.yaml
- yq eval -i ".user-transformer.image.repository=\"$TF_IMAGE_REPOSITORY\"" enterprise/enterprise.yaml
git add enterprise/enterprise.yaml
yq eval -i ".user-transformer.image.tag=\"$UT_TAG_NAME\"" multi-tenant/multi-tenant.yaml
- yq eval -i ".user-transformer.image.repository=\"$TF_IMAGE_REPOSITORY\"" multi-tenant/multi-tenant.yaml
git add multi-tenant/multi-tenant.yaml
cd -
@@ -153,14 +154,15 @@ jobs:
- name: Update Helm Chart and Raise Pull Request For Hosted Transformer
env:
- GITHUB_TOKEN: ${{ secrets.PAT }}
+ GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}
run: |
cd rudder-devops
+ git config user.name "GitHub Actions"
+ git config user.email "noreply@github.com"
git checkout -b hosted-user-transformer-$UT_TAG_NAME ${{steps.extract_branch_name.outputs.branch_name}}
cd customer-objects/multi-tenant-us
yq eval -i ".spec.user-transformer.image.version=\"$UT_TAG_NAME\"" hostedmtedmt.yaml
- yq eval -i ".spec.user-transformer.image.repository=\"$TF_IMAGE_REPOSITORY\"" hostedmtedmt.yaml
git add hostedmtedmt.yaml
git commit -m "chore: upgrade hosted user-transformer to $UT_TAG_NAME"
@@ -171,9 +173,11 @@ jobs:
- name: Update helm charts and raise pull request for enterprise customers on dedicated transformers
env:
- GITHUB_TOKEN: ${{ secrets.PAT }}
+ GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}
run: |
cd rudder-devops
+ git config user.name "GitHub Actions"
+ git config user.email "noreply@github.com"
git checkout -b dedicated-user-transformer-$TAG_NAME
cd customer-objects
@@ -196,7 +200,6 @@ jobs:
# bump up the customers version and repository information
for customer in "${enabled_ut_customers[@]}"; do
yq eval -i ".spec.user-transformer.image.version=\"$UT_TAG_NAME\"" $customer
- yq eval -i ".spec.user-transformer.image.repository=\"$TF_IMAGE_REPOSITORY\"" $customer
git add $customer
done
diff --git a/.github/workflows/prepare-for-staging-deploy.yml b/.github/workflows/prepare-for-staging-deploy.yml
index 727b1430b2a..910d9d5d740 100644
--- a/.github/workflows/prepare-for-staging-deploy.yml
+++ b/.github/workflows/prepare-for-staging-deploy.yml
@@ -22,6 +22,8 @@ jobs:
generate-tag-names:
runs-on: ubuntu-latest
name: Generate Tag Names
+ permissions:
+ contents: read # to checkout repository code and read package.json (actions/checkout)
# Only pull requests from release candidate branches must trigger
if: (startsWith(github.event.pull_request.head.ref, 'release/') || startsWith(github.event.pull_request.head.ref, 'hotfix-release/'))
outputs:
@@ -29,12 +31,12 @@ jobs:
tag_name_ut: ${{ steps.gen_tag_names.outputs.tag_name_ut }}
steps:
- name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
+ uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
- name: Checkout
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
fetch-depth: 1
@@ -51,6 +53,9 @@ jobs:
build-transformer-image:
name: Build Transformer Docker Image - Staging
+ permissions:
+ id-token: write # to pass OIDC token to reusable workflow (build-push-docker-image.yml)
+ contents: read # to checkout repository code and call reusable workflow (actions/checkout)
# Only pull requests from release candidate branches must trigger
if: (startsWith(github.event.pull_request.head.ref, 'release/') || startsWith(github.event.pull_request.head.ref, 'hotfix-release/'))
needs: [generate-tag-names]
@@ -63,6 +68,7 @@ jobs:
load_target: development
push_target: production
use_merge_sha: true
+ create_latest_tag: false
secrets:
DOCKERHUB_PROD_TOKEN: ${{ secrets.DOCKERHUB_PROD_TOKEN }}
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -70,6 +76,9 @@ jobs:
build-user-transformer-image:
name: Build User Transformer Docker Image - Staging
+ permissions:
+ id-token: write # to pass OIDC token to reusable workflow (build-push-docker-image.yml)
+ contents: read # to checkout repository code and call reusable workflow (actions/checkout)
# Only pull requests from release candidate branches must trigger
if: (startsWith(github.event.pull_request.head.ref, 'release/') || startsWith(github.event.pull_request.head.ref, 'hotfix-release/'))
@@ -82,6 +91,7 @@ jobs:
dockerfile: Dockerfile-ut-func
load_target: development
push_target: production
+ create_latest_tag: false
secrets:
DOCKERHUB_PROD_TOKEN: ${{ secrets.DOCKERHUB_PROD_TOKEN }}
DOCKERHUB_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
@@ -91,28 +101,34 @@ jobs:
name: Update Helm Charts For Staging and Create Pull Request
runs-on: ubuntu-latest
needs: [generate-tag-names, build-transformer-image, build-user-transformer-image]
+
+ permissions:
+ contents: read # to checkout repository code; cross-repo writes use app token (create-github-app-token)
+
env:
- TF_IMAGE_REPOSITORY: rudderstack/rudder-transformer
TAG_NAME: ${{ needs.generate-tag-names.outputs.tag_name }}
UT_TAG_NAME: ${{ needs.generate-tag-names.outputs.tag_name_ut }}
steps:
- name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
+ uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
+ - name: Generate GitHub App Token
+ id: generate-token
+ uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
+ with:
+ app-id: ${{ vars.RELEASE_APP_ID }}
+ private-key: ${{ secrets.RELEASE_PRIVATE_KEY }}
+ permission-contents: write # to create commits and push branches
+ permission-pull-requests: write # to create and update PRs
+ repositories: rudder-devops # to allow the app token to access the cross-repo rudder-devops repository
+
- name: Checkout
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
fetch-depth: 1
- # In order to make a commit, we need to initialize a user.
- # You may choose to write something less generic here if you want, it doesn't matter functionality wise.
- - name: Initialize Mandatory Git Config
- run: |
- git config --global user.name "GitHub Actions"
- git config --global user.email "noreply@github.com"
-
- name: Print Docker Image Tags
run: |
echo "Transformer: $TAG_NAME"
@@ -120,10 +136,12 @@ jobs:
- name: Update Helm Charts and Raise Pull Request
env:
- GITHUB_TOKEN: ${{ secrets.PAT }}
+ GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}
run: |
- git clone https://${{secrets.PAT}}@github.com/rudderlabs/rudder-devops.git
+ gh repo clone rudderlabs/rudder-devops
cd rudder-devops
+ git config user.name "GitHub Actions"
+ git config user.email "noreply@github.com"
BRANCH_NAME="shared-transformer-$TAG_NAME"
echo $BRANCH_NAME
if [ -n "$(git ls-remote --heads origin $BRANCH_NAME 2>/dev/null)" ]
@@ -133,7 +151,6 @@ jobs:
git checkout -b $BRANCH_NAME
cd helm-charts/shared-services/per-az/environment/staging
- yq eval -i ".rudder-transformer.image.repository=\"$TF_IMAGE_REPOSITORY\"" staging.yaml
yq eval -i ".rudder-transformer.image.tag=\"$TAG_NAME\"" staging.yaml
yq eval -i ".user-transformer.image.tag=\"$TAG_NAME\"" staging.yaml
git add staging.yaml
diff --git a/.github/workflows/publish-new-release.yml b/.github/workflows/publish-new-release.yml
index 626bc7a8f5b..b5e82b0f93c 100644
--- a/.github/workflows/publish-new-release.yml
+++ b/.github/workflows/publish-new-release.yml
@@ -10,23 +10,31 @@ on:
branches:
- main
-permissions:
- contents: write
- pull-requests: write
-
jobs:
release:
name: Publish New GitHub Release
runs-on: ubuntu-latest
+ permissions:
+ contents: read # to checkout repository code; releases and tags use app token (create-github-app-token)
+
if: (startsWith(github.event.pull_request.head.ref, 'release/') || startsWith(github.event.pull_request.head.ref, 'hotfix-release/')) && github.event.pull_request.merged == true # only merged pull requests must trigger this job
steps:
- name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
+ uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
+ - name: Generate GitHub App Token
+ id: generate-token
+ uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
+ with:
+ app-id: ${{ vars.RELEASE_APP_ID }}
+ private-key: ${{ secrets.RELEASE_PRIVATE_KEY }}
+ permission-contents: write # to create commits, tags, and releases
+ permission-pull-requests: write # to create and update PRs
+
- name: Extract Version
id: extract-version
run: |
@@ -37,12 +45,13 @@ jobs:
echo "release_version=$version" >> $GITHUB_OUTPUT
- name: Checkout
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
fetch-depth: 0
+ token: ${{ steps.generate-token.outputs.token }}
- name: Setup Node
- uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
+ uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
with:
node-version-file: '.nvmrc'
cache: 'npm'
@@ -60,40 +69,50 @@ jobs:
git config --global user.name "GitHub Actions"
git config --global user.email "noreply@github.com"
- - name: Tag & Create GitHub Release
- id: create_release
+ - name: Check if tag exists
+ id: check_tag
env:
- HUSKY: 0
- GITHUB_TOKEN: ${{ secrets.PAT }}
- GH_TOKEN: ${{ secrets.PAT }}
+ GH_TOKEN: ${{ steps.generate-token.outputs.token }}
+ RELEASE_VERSION: ${{ steps.extract-version.outputs.release_version }}
run: |
- echo "🏷️ Checking and creating tag v${{ steps.extract-version.outputs.release_version }}..."
+ echo "🏷️ Checking if tag v$RELEASE_VERSION exists..."
git fetch --tags origin
-
- # Check if tag already exists
- if git rev-parse --verify "v${{ steps.extract-version.outputs.release_version }}" >/dev/null 2>&1; then
- echo "✅ Tag v${{ steps.extract-version.outputs.release_version }} already exists, skipping tag creation"
+ if git rev-parse --verify "v$RELEASE_VERSION" >/dev/null 2>&1; then
+ echo "✅ Tag v$RELEASE_VERSION already exists, skipping tag creation"
+ echo "tag_exists=true" >> $GITHUB_OUTPUT
else
- echo "🏷️ Creating new tag v${{ steps.extract-version.outputs.release_version }}..."
- git tag -a v${{ steps.extract-version.outputs.release_version }} -m "chore: release v${{ steps.extract-version.outputs.release_version }}"
- git push origin refs/tags/v${{ steps.extract-version.outputs.release_version }}
- echo "✅ Tag v${{ steps.extract-version.outputs.release_version }} created and pushed successfully"
+ echo "🏷️ Tag v$RELEASE_VERSION does not exist, will create"
+ echo "tag_exists=false" >> $GITHUB_OUTPUT
fi
+ - name: Create verified tag
+ if: steps.check_tag.outputs.tag_exists == 'false'
+ uses: ryancyq/github-signed-commit@e9f3b28c80da7be66d24b8f501a5abe82a6b855f # v1.2.0
+ env:
+ GH_TOKEN: ${{ steps.generate-token.outputs.token }}
+ with:
+ branch-name: main
+ tag: 'v${{ steps.extract-version.outputs.release_version }}'
+ files: ''
+ tag-only-if-file-changes: false
+
+ - name: Create GitHub Release
+ id: create_release
+ env:
+ HUSKY: 0
+ GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}
+ GH_TOKEN: ${{ steps.generate-token.outputs.token }}
+ run: |
echo "🚀 Creating GitHub release with conventional commit notes..."
- # Make the script executable
chmod +x scripts/create-github-release.js
-
- # Create release using our modern script with Angular conventional commit style notes
node scripts/create-github-release.js
-
echo "✅ Release process completed successfully!"
echo "DATE=$(date)" >> $GITHUB_ENV
- name: Verify Release Creation
id: verify_release
env:
- GH_TOKEN: ${{ secrets.PAT }}
+ GH_TOKEN: ${{ steps.generate-token.outputs.token }}
run: |
echo "🔍 Verifying that release v${{ steps.extract-version.outputs.release_version }} exists..."
@@ -151,7 +170,7 @@ jobs:
--title "chore(release): pull main into develop post release v${{ steps.extract-version.outputs.release_version }}" \
--body ":crown: *An automated PR*"
env:
- GH_TOKEN: ${{ secrets.PAT }}
+ GH_TOKEN: ${{ steps.generate-token.outputs.token }}
- name: Notify Slack Channel
id: slack
diff --git a/.github/workflows/slack-notify.yml b/.github/workflows/slack-notify.yml
index fcc659c5a57..36e7fc331a0 100644
--- a/.github/workflows/slack-notify.yml
+++ b/.github/workflows/slack-notify.yml
@@ -10,21 +10,20 @@ on:
type: string
required: true
-permissions:
- contents: read
-
jobs:
notify:
runs-on: ubuntu-latest
+ permissions:
+ contents: read # to checkout repository code (actions/checkout)
if: ${{ inputs.should_notify }}
steps:
- name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
+ uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
- name: Checkout
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: notify
uses: slackapi/slack-github-action@91efab103c0de0a537f72a35f6b8cda0ee76bf0a # v2.1.1
diff --git a/.github/workflows/update-ingestion-service.yml b/.github/workflows/update-ingestion-service.yml
index 6dbf948af3c..abac49b7c9c 100644
--- a/.github/workflows/update-ingestion-service.yml
+++ b/.github/workflows/update-ingestion-service.yml
@@ -7,39 +7,43 @@ on:
- completed
workflow_call:
secrets:
- PAT:
- description: 'Personal Access Token for repository access'
+ RELEASE_PRIVATE_KEY:
required: true
workflow_dispatch: {}
-permissions:
- contents: write
- pull-requests: write
-
jobs:
update-ingestion-service:
name: Update Ingestion Service with Latest Transformer Version
runs-on: ubuntu-latest
+
+ permissions:
+ contents: read # to checkout repository code; tag pushes and cross-repo writes use app token (create-github-app-token)
# Only run if the workflow_run was successful (doesn't apply to manual triggers)
if: >
github.event_name == 'workflow_dispatch' ||
- github.event_name == 'workflow_call' ||
- (
- github.event_name == 'workflow_run' &&
- github.event.workflow_run.conclusion == 'success' &&
- github.event.workflow_run.pull_requests[0].merged == true &&
- github.event.workflow_run.pull_requests[0].base.ref == 'main'
- )
+ github.event_name == 'workflow_call' ||
+ (github.event_name == 'workflow_run' && github.event.workflow_run.conclusion == 'success')
steps:
- name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
+ uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
+ - name: Generate GitHub App Token
+ id: generate-token
+ uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
+ with:
+ app-id: ${{ vars.RELEASE_APP_ID }}
+ private-key: ${{ secrets.RELEASE_PRIVATE_KEY }}
+ permission-contents: write # to create commits, tags, and push branches
+ permission-pull-requests: write # to create and update PRs
+ repositories: rudder-transformer,rudder-ingestion-svc # include current repo for tag push
+
- name: Checkout
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
fetch-depth: 1
+ token: ${{ steps.generate-token.outputs.token }}
# In order to make a commit, we need to initialize a user.
- name: Initialize Mandatory Git Config
@@ -50,7 +54,7 @@ jobs:
- name: Get Latest Transformer Release Version
id: get_latest_version
env:
- GITHUB_TOKEN: ${{ secrets.PAT }}
+ GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}
run: |
# Get the latest release tag from GitHub API
latest_tag=$(gh api repos/rudderlabs/rudder-transformer/releases/latest --jq '.tag_name')
@@ -71,7 +75,7 @@ jobs:
- name: Check and Create Go Module Tag
id: create_go_tag
env:
- GITHUB_TOKEN: ${{ secrets.PAT }}
+ GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}
VERSION: ${{ steps.get_latest_version.outputs.version }}
GO_TAG: ${{ steps.get_latest_version.outputs.go_tag }}
run: |
@@ -94,10 +98,10 @@ jobs:
fi
- name: Clone Ingestion Service Repo
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
repository: rudderlabs/rudder-ingestion-svc
- token: ${{ secrets.PAT }}
+ token: ${{ steps.generate-token.outputs.token }}
path: rudder-ingestion-svc
fetch-depth: 1
@@ -110,7 +114,7 @@ jobs:
echo "Go version from ingestion service: $go_version"
- name: Setup Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
+ uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0
with:
go-version: ${{ steps.get_go_version.outputs.go_version }}
@@ -163,7 +167,7 @@ jobs:
- name: Create Pull Request
if: steps.update.outputs.has_changes == 'true'
env:
- GITHUB_TOKEN: ${{ secrets.PAT }}
+ GITHUB_TOKEN: ${{ steps.generate-token.outputs.token }}
LATEST_TAG: ${{ steps.get_latest_version.outputs.latest_tag }}
run: |
cd rudder-ingestion-svc
diff --git a/.github/workflows/ut-tests.yml b/.github/workflows/ut-tests.yml
index 8f1a6c7af17..e46023bbc27 100644
--- a/.github/workflows/ut-tests.yml
+++ b/.github/workflows/ut-tests.yml
@@ -7,9 +7,6 @@ on:
- reopened
- synchronize
-permissions:
- contents: read
-
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.sha }}
cancel-in-progress: true
@@ -18,14 +15,16 @@ jobs:
ut_tests:
name: UT Tests
runs-on: ubuntu-latest
+ permissions:
+ contents: read # to checkout repository code (actions/checkout)
steps:
- name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
+ uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
- name: Setup Go
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # v6.1.0
+ uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0
with:
go-version: 1.17
@@ -33,12 +32,12 @@ jobs:
run: go install sigs.k8s.io/kind@v0.17.0
- name: Checkout
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
fetch-depth: 1
- name: Setup Node
- uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
+ uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
with:
node-version-file: '.nvmrc'
cache: 'npm'
diff --git a/.github/workflows/validate-actor.yml b/.github/workflows/validate-actor.yml
index c35043f9bd2..b513bd0da1f 100644
--- a/.github/workflows/validate-actor.yml
+++ b/.github/workflows/validate-actor.yml
@@ -2,32 +2,39 @@ name: Validate Actor
on:
workflow_call:
- secrets:
- PAT:
- required: true
inputs:
team_names:
description: 'Comma-separated list of team names'
type: string
default: 'integrations'
-
-permissions:
- contents: read
+ secrets:
+ RELEASE_PRIVATE_KEY:
+ required: true
jobs:
validate-actor:
runs-on: [self-hosted, Linux, X64]
+ permissions:
+ contents: read # minimum required permission; org team membership check uses app token (create-github-app-token)
steps:
- name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@df199fb7be9f65074067a9eb93f12bb4c5547cf2 # v2.13.3
+ uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
+ - name: Generate GitHub App Token
+ id: generate-token
+ uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
+ with:
+ app-id: ${{ vars.RELEASE_APP_ID }}
+ private-key: ${{ secrets.RELEASE_PRIVATE_KEY }}
+ permission-members: read # to check org team membership
+
- name: Validate if actor is allowed to trigger the workflow
env:
ORG_NAME: rudderlabs
TEAM_NAMES: ${{ inputs.team_names }}
- GH_TOKEN: ${{ secrets.PAT }}
+ GH_TOKEN: ${{ steps.generate-token.outputs.token }}
run: |
actor=${{ github.actor || github.triggering_actor }}
allowed=false
diff --git a/.github/workflows/verify-server-start.yml b/.github/workflows/verify-server-start.yml
index f9b1ab3206e..c456aa162ba 100644
--- a/.github/workflows/verify-server-start.yml
+++ b/.github/workflows/verify-server-start.yml
@@ -4,9 +4,6 @@ on:
pull_request:
types: ['opened', 'reopened', 'synchronize']
-permissions:
- contents: read
-
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.sha }}
cancel-in-progress: true
@@ -14,20 +11,22 @@ concurrency:
jobs:
check-health:
runs-on: ubuntu-latest
+ permissions:
+ contents: read # to checkout repository code (actions/checkout)
steps:
- name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
+ uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
- name: Checkout
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
fetch-depth: 1
- name: Setup Node
- uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
+ uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
with:
node-version-file: '.nvmrc'
cache: 'npm'
diff --git a/.github/workflows/verify.yml b/.github/workflows/verify.yml
index e7275797e35..24e9e6831ff 100644
--- a/.github/workflows/verify.yml
+++ b/.github/workflows/verify.yml
@@ -3,10 +3,6 @@ name: Verify
on:
pull_request:
-permissions:
- contents: read
- pull-requests: read
-
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.sha }}
cancel-in-progress: true
@@ -15,21 +11,24 @@ jobs:
formatting-lint:
name: Check for formatting & lint errors
runs-on: ubuntu-latest
+ permissions:
+ contents: read # to checkout repository code (actions/checkout)
+ pull-requests: read # to list changed files in the PR (Ana06/get-changed-files)
steps:
- name: Harden the runner (Audit all outbound calls)
- uses: step-security/harden-runner@f4a75cfd619ee5ce8d5b864b0d183aff3c69b55a # v2.13.1
+ uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
- name: Checkout
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
+ uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
# Make sure the actual branch is checked out when running on pull requests
ref: ${{ github.head_ref }}
- name: Setup Node
- uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 # v6.0.0
+ uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
with:
node-version-file: .nvmrc
cache: 'npm'
diff --git a/CHANGELOG.md b/CHANGELOG.md
index a29d66f45ce..80357cc78c3 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,134 @@
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
+## [1.125.0](https://github.com/rudderlabs/rudder-transformer/compare/v1.124.4...v1.125.0) (2026-03-09)
+
+
+### Features
+
+* **gaec:** support userDefined adjustment type ([#5016](https://github.com/rudderlabs/rudder-transformer/issues/5016)) ([1c7391c](https://github.com/rudderlabs/rudder-transformer/commit/1c7391cc01b6fd36cef27cde1e89a471e243016f))
+
+
+### Bug Fixes
+
+* **iterable:** source transformation to handle userId ([#5017](https://github.com/rudderlabs/rudder-transformer/issues/5017)) ([fcee8a6](https://github.com/rudderlabs/rudder-transformer/commit/fcee8a6a680738e4863cf27890d24203847b774e))
+
+### [1.124.4](https://github.com/rudderlabs/rudder-transformer/compare/v1.124.3...v1.124.4) (2026-03-05)
+
+
+### Bug Fixes
+
+* tiktok zod validations for listdata ([8494b2d](https://github.com/rudderlabs/rudder-transformer/commit/8494b2d041efbd71bf6d3fbed8f922fdd103df16))
+
+### [1.124.3](https://github.com/rudderlabs/rudder-transformer/compare/v1.124.2...v1.124.3) (2026-03-02)
+
+
+### Bug Fixes
+
+* tiktok audience list message type ([#5011](https://github.com/rudderlabs/rudder-transformer/issues/5011)) ([89ce201](https://github.com/rudderlabs/rudder-transformer/commit/89ce201f1731c83cd3ca71da1332f88d7aebeaad)), closes [/github.com/rudderlabs/rudder-transformer/pull/4971/changes/6a0ec7da38342d9f48548d04c16b634ba490220b#diff-abed566c996f7d32111260dfa5b8d1cddb2025c236fb3f4c998e598a00597b8aL14-L25](https://github.com/rudderlabs//github.com/rudderlabs/rudder-transformer/pull/4971/changes/6a0ec7da38342d9f48548d04c16b634ba490220b/issues/diff-abed566c996f7d32111260dfa5b8d1cddb2025c236fb3f4c998e598a00597b8aL14-L25)
+
+### [1.124.2](https://github.com/rudderlabs/rudder-transformer/compare/v1.124.1...v1.124.2) (2026-02-27)
+
+
+### Bug Fixes
+
+* **fb_custom_audience:** make max payload size configurable ([#5003](https://github.com/rudderlabs/rudder-transformer/issues/5003)) ([0d02f16](https://github.com/rudderlabs/rudder-transformer/commit/0d02f16a6eddbf9b53931ac0be49bbaa0508d51d))
+
+### [1.124.1](https://github.com/rudderlabs/rudder-transformer/compare/v1.124.0...v1.124.1) (2026-02-24)
+
+
+### Bug Fixes
+
+* **mp:** 500 error for invalid timestamp ([#4994](https://github.com/rudderlabs/rudder-transformer/issues/4994)) ([28310b9](https://github.com/rudderlabs/rudder-transformer/commit/28310b91fbd8058517ad18cbaef2856af765e6ef))
+
+## [1.124.0](https://github.com/rudderlabs/rudder-transformer/compare/v1.123.0...v1.124.0) (2026-02-19)
+
+
+### Features
+
+* add support of region on CustomerIo Audience ([#4982](https://github.com/rudderlabs/rudder-transformer/issues/4982)) ([0ac91c9](https://github.com/rudderlabs/rudder-transformer/commit/0ac91c92c0d1261f4ed0cd856ca5eaf8d79446bc))
+
+
+### Bug Fixes
+
+* remove redundant url encoding of query params ([#4985](https://github.com/rudderlabs/rudder-transformer/issues/4985)) ([e4b47bc](https://github.com/rudderlabs/rudder-transformer/commit/e4b47bc16ca3d34ef9275795ab0c3e95404e7a31))
+
+## [1.123.0](https://github.com/rudderlabs/rudder-transformer/compare/v1.122.0...v1.123.0) (2026-02-16)
+
+
+### Features
+
+* **hs:** add support for HubSpot V3 batch upsert endpoint ([#4953](https://github.com/rudderlabs/rudder-transformer/issues/4953)) ([e7daa9c](https://github.com/rudderlabs/rudder-transformer/commit/e7daa9c2614fe5a625578be4a34b80151fd12cad))
+* **singular:** add data_sharing_options mapping for /launch and /evt ([#4947](https://github.com/rudderlabs/rudder-transformer/issues/4947)) ([e0b9a6a](https://github.com/rudderlabs/rudder-transformer/commit/e0b9a6a27c5cf10ff6f6ffd6efef190e9a582c66))
+* **singular:** add V2 event API with SDID-based device identification ([#4976](https://github.com/rudderlabs/rudder-transformer/issues/4976)) ([6d03fc3](https://github.com/rudderlabs/rudder-transformer/commit/6d03fc3336a4ecafee9ab8dc946dc026aec1dc0f))
+* tiktok audience zod validations ([#4971](https://github.com/rudderlabs/rudder-transformer/issues/4971)) ([5750f49](https://github.com/rudderlabs/rudder-transformer/commit/5750f499af799451523129a7738a8dd64f5b9a98))
+
+## [1.122.0](https://github.com/rudderlabs/rudder-transformer/compare/v1.121.1...v1.122.0) (2026-02-10)
+
+
+### Features
+
+* tiktok typescript migration ([#4937](https://github.com/rudderlabs/rudder-transformer/issues/4937)) ([d8d3f37](https://github.com/rudderlabs/rudder-transformer/commit/d8d3f37021b83180e000587c5285a84a269b2bae))
+
+
+### Bug Fixes
+
+* endpoint label missing for attentive tag destination ([#4951](https://github.com/rudderlabs/rudder-transformer/issues/4951)) ([62b0617](https://github.com/rudderlabs/rudder-transformer/commit/62b06179a2f7542913c38105adc46b22a98a0378))
+* typo ([#4948](https://github.com/rudderlabs/rudder-transformer/issues/4948)) ([8a5fd0a](https://github.com/rudderlabs/rudder-transformer/commit/8a5fd0a3babe1f774c79cbced29c0bd5be149ee6))
+
+### [1.121.1](https://github.com/rudderlabs/rudder-transformer/compare/v1.121.0...v1.121.1) (2026-02-09)
+
+## [1.121.0](https://github.com/rudderlabs/rudder-transformer/compare/v1.120.0...v1.121.0) (2026-01-29)
+
+
+### Features
+
+* cdk v2 migration support ([#4927](https://github.com/rudderlabs/rudder-transformer/issues/4927)) ([ce4b5b0](https://github.com/rudderlabs/rudder-transformer/commit/ce4b5b0208886d7f31e2a7fdaafc2b377138deaf))
+* new testRun endpoint ([a5d248c](https://github.com/rudderlabs/rudder-transformer/commit/a5d248cd439b4801232b3d3efffe3100ab4570ae))
+* new testRun endpoint ([#4926](https://github.com/rudderlabs/rudder-transformer/issues/4926)) ([d1239ad](https://github.com/rudderlabs/rudder-transformer/commit/d1239ada7d5fa5e38de99d77a8aa0cc60ee0424f))
+
+
+### Bug Fixes
+
+* remove isSuccess label ([#4928](https://github.com/rudderlabs/rudder-transformer/issues/4928)) ([914c65f](https://github.com/rudderlabs/rudder-transformer/commit/914c65f4d4dfd5f3cbd22afacb1e8e6c177102ba))
+
+## [1.120.0](https://github.com/rudderlabs/rudder-transformer/compare/v1.119.0...v1.120.0) (2026-01-20)
+
+
+### Features
+
+* add support of ALL and NONE enum for SOQL rollout ([#4885](https://github.com/rudderlabs/rudder-transformer/issues/4885)) ([60631b0](https://github.com/rudderlabs/rudder-transformer/commit/60631b021921239022b0da0c26d2af3d9aca7339))
+* add X-Rudder-Permanent-Error header for 4xx errors in source hydration API ([#4906](https://github.com/rudderlabs/rudder-transformer/issues/4906)) ([1d74001](https://github.com/rudderlabs/rudder-transformer/commit/1d74001b2845aa4ad2b83326e02dc16693a79c81))
+* **braze:** update batching as per new MAU plan ([#4915](https://github.com/rudderlabs/rudder-transformer/issues/4915)) ([afd3f42](https://github.com/rudderlabs/rudder-transformer/commit/afd3f4207f57a4ee95f3a160c57c33f37429b0fc))
+* update onesignal header as per request ([#4870](https://github.com/rudderlabs/rudder-transformer/issues/4870)) ([51f0ab1](https://github.com/rudderlabs/rudder-transformer/commit/51f0ab1a9586e73c41e9171b90a2c3055d22ec67))
+
+
+### Bug Fixes
+
+* add mapping support for clickId in Impact ([#4884](https://github.com/rudderlabs/rudder-transformer/issues/4884)) ([ba10c35](https://github.com/rudderlabs/rudder-transformer/commit/ba10c35f39c490f7f554ff4465765d92d9fdda7e))
+* add typecast to the email string before running trim function ([#4829](https://github.com/rudderlabs/rudder-transformer/issues/4829)) ([56203fb](https://github.com/rudderlabs/rudder-transformer/commit/56203fbe3d9f50b79bf9cb1fe25de06b0efb5dd4))
+* add validation of lookupFieldValue before making lookup api call ([#4876](https://github.com/rudderlabs/rudder-transformer/issues/4876)) ([1939ebb](https://github.com/rudderlabs/rudder-transformer/commit/1939ebb991ada8ff989e4bfb6c538406bdf37ed9))
+* eliminate race condition in Reddit timestamp boundary tests ([#4897](https://github.com/rudderlabs/rudder-transformer/issues/4897)) ([8446b8a](https://github.com/rudderlabs/rudder-transformer/commit/8446b8ad9dc61d6ac640251637b1bc402ee61b5c))
+* fetch with dns wrapper does not reuse http client ([#4873](https://github.com/rudderlabs/rudder-transformer/issues/4873)) ([37ecd65](https://github.com/rudderlabs/rudder-transformer/commit/37ecd658247e1eb42fd1cbb22e4a10bba4e9e0f4))
+* indent ([dd24ac8](https://github.com/rudderlabs/rudder-transformer/commit/dd24ac8856a65ffeae76be1b781d54daf6ac18de))
+* unexpected string data type in zendesk field endpoint ([#4877](https://github.com/rudderlabs/rudder-transformer/issues/4877)) ([8eeaae4](https://github.com/rudderlabs/rudder-transformer/commit/8eeaae444540bb39f4a7b19cb41e03d39bbe7641))
+
+## [1.119.0](https://github.com/rudderlabs/rudder-transformer/compare/v1.118.0...v1.119.0) (2026-01-06)
+
+
+### Features
+
+* add support of ALL and NONE enum for SOQL rollout ([#4885](https://github.com/rudderlabs/rudder-transformer/issues/4885)) ([60631b0](https://github.com/rudderlabs/rudder-transformer/commit/60631b021921239022b0da0c26d2af3d9aca7339))
+* update onesignal header as per request ([#4870](https://github.com/rudderlabs/rudder-transformer/issues/4870)) ([51f0ab1](https://github.com/rudderlabs/rudder-transformer/commit/51f0ab1a9586e73c41e9171b90a2c3055d22ec67))
+
+
+### Bug Fixes
+
+* add mapping support for clickId in Impact ([#4884](https://github.com/rudderlabs/rudder-transformer/issues/4884)) ([ba10c35](https://github.com/rudderlabs/rudder-transformer/commit/ba10c35f39c490f7f554ff4465765d92d9fdda7e))
+* add typecast to the email string before running trim function ([#4829](https://github.com/rudderlabs/rudder-transformer/issues/4829)) ([56203fb](https://github.com/rudderlabs/rudder-transformer/commit/56203fbe3d9f50b79bf9cb1fe25de06b0efb5dd4))
+* add validation of lookupFieldValue before making lookup api call ([#4876](https://github.com/rudderlabs/rudder-transformer/issues/4876)) ([1939ebb](https://github.com/rudderlabs/rudder-transformer/commit/1939ebb991ada8ff989e4bfb6c538406bdf37ed9))
+* unexpected string data type in zendesk field endpoint ([#4877](https://github.com/rudderlabs/rudder-transformer/issues/4877)) ([8eeaae4](https://github.com/rudderlabs/rudder-transformer/commit/8eeaae444540bb39f4a7b19cb41e03d39bbe7641))
+
## [1.118.0](https://github.com/rudderlabs/rudder-transformer/compare/v1.117.1...v1.118.0) (2025-12-16)
diff --git a/README.md b/README.md
index bb645c1480a..4dbe036cec2 100644
--- a/README.md
+++ b/README.md
@@ -4,6 +4,7 @@
[](https://codecov.io/gh/rudderlabs/rudder-transformer)
+[](https://deepwiki.com/rudderlabs/rudder-transformer)
# RudderStack Transformer
diff --git a/go/webhook/testcases/testdata/testcases/appsflyer/test_2_0.json b/go/webhook/testcases/testdata/testcases/appsflyer/test_2_0.json
index 063fcb3c63c..c8b3ac4a1f4 100644
--- a/go/webhook/testcases/testdata/testcases/appsflyer/test_2_0.json
+++ b/go/webhook/testcases/testdata/testcases/appsflyer/test_2_0.json
@@ -17,7 +17,7 @@
"output": {
"response": {
"status": 400,
- "body": "Unknwon event type from Appsflyer\n"
+ "body": "Unknown event type from Appsflyer\n"
},
"queue": [],
"errQueue": [
diff --git a/go/webhook/testcases/testdata/testcases/iterable/test_1_0.json b/go/webhook/testcases/testdata/testcases/iterable/test_1_0.json
index 0b085c1ac8f..359577d17c1 100644
--- a/go/webhook/testcases/testdata/testcases/iterable/test_1_0.json
+++ b/go/webhook/testcases/testdata/testcases/iterable/test_1_0.json
@@ -17,7 +17,7 @@
"output": {
"response": {
"status": 400,
- "body": "Unknwon event type from Iterable\n"
+ "body": "Unknown event type from Iterable\n"
},
"queue": [],
"errQueue": [
diff --git a/go/webhook/testcases/testdata/testcases/iterable/test_2_0.json b/go/webhook/testcases/testdata/testcases/iterable/test_2_0.json
index 33cfe27a94c..a7f1443f6da 100644
--- a/go/webhook/testcases/testdata/testcases/iterable/test_2_0.json
+++ b/go/webhook/testcases/testdata/testcases/iterable/test_2_0.json
@@ -17,7 +17,7 @@
"output": {
"response": {
"status": 400,
- "body": "Unknwon event type from Iterable\n"
+ "body": "Unknown event type from Iterable\n"
},
"queue": [],
"errQueue": [
diff --git a/go/webhook/testcases/testdata/testcases/iterable/test_30_0.json b/go/webhook/testcases/testdata/testcases/iterable/test_30_0.json
new file mode 100644
index 00000000000..51b5df6df1e
--- /dev/null
+++ b/go/webhook/testcases/testdata/testcases/iterable/test_30_0.json
@@ -0,0 +1,54 @@
+{
+ "name": "iterable",
+ "description": "test-30",
+ "input": {
+ "request": {
+ "query": {},
+ "body": "{\"userId\":\"12345\",\"eventName\":\"webPushSendSkip\",\"dataFields\":{\"createdAt\":\"2019-08-07 23:43:48 +00:00\",\"reason\":\"DuplicateMarketingMessage\",\"campaignId\":723636,\"messageId\":\"4238c918b20a41dfbe9a910275b76f12\",\"email\":\"docs@iterable.com\"}}",
+ "headers": {
+ "Content-Type": "application/json"
+ },
+ "method": "POST"
+ },
+ "source": {
+ "config": "{}"
+ }
+ },
+ "output": {
+ "response": {
+ "status": 200,
+ "body": "OK"
+ },
+ "queue": [
+ {
+ "userId": "12345",
+ "context": {
+ "integration": {
+ "name": "Iterable",
+ "version": "1.0.0"
+ },
+ "library": {
+ "name": "unknown",
+ "version": "unknown"
+ }
+ },
+ "event": "webPushSendSkip",
+ "integrations": {
+ "Iterable": false
+ },
+ "properties": {
+ "createdAt": "2019-08-07 23:43:48 +00:00",
+ "reason": "DuplicateMarketingMessage",
+ "campaignId": 723636,
+ "messageId": "4238c918b20a41dfbe9a910275b76f12"
+ },
+ "receivedAt": "2024-03-03T04:48:29.000Z",
+ "timestamp": "2019-08-07T23:43:48.000Z",
+ "type": "track",
+ "request_ip": "192.0.2.30",
+ "messageId": "00000000-0000-0000-0000-000000000000"
+ }
+ ],
+ "errQueue": []
+ }
+}
diff --git a/jest.default.config.js b/jest.default.config.js
index 8661387a772..b9573c9609f 100644
--- a/jest.default.config.js
+++ b/jest.default.config.js
@@ -175,9 +175,15 @@ module.exports = {
// timers: "real",
// A map from regular expressions to paths to transformers
- // transform: {
- // '^.+\\.(ts|tsx)$': 'ts-jest',
- // },
+ transform: {
+ '^.+\\.[tj]sx?$': [
+ 'ts-jest',
+ {
+ tsconfig: 'tsconfig.json',
+ diagnostics: true,
+ },
+ ],
+ },
// An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
// transformIgnorePatterns: [
diff --git a/package-lock.json b/package-lock.json
index bf5b2de4924..342e2a55c3f 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "rudder-transformer",
- "version": "1.118.0",
+ "version": "1.125.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "rudder-transformer",
- "version": "1.118.0",
+ "version": "1.125.0",
"license": "ISC",
"dependencies": {
"@amplitude/ua-parser-js": "0.7.24",
@@ -17,7 +17,7 @@
"@koa/router": "^12.0.0",
"@ndhoule/extend": "^2.0.0",
"@pyroscope/nodejs": "^0.4.5",
- "@rudderstack/integrations-lib": "^0.2.59",
+ "@rudderstack/integrations-lib": "^0.2.63",
"@rudderstack/json-template-engine": "^0.19.5",
"@rudderstack/workflow-engine": "^0.9.0",
"@shopify/jest-koa-mocks": "^5.1.1",
@@ -25,7 +25,7 @@
"ajv-draft-04": "^1.0.0",
"ajv-formats": "^2.1.1",
"amazon-dsp-formatter": "^1.0.2",
- "axios": "^1.7.9",
+ "axios": "^1.13.2",
"btoa": "^1.2.1",
"component-each": "^0.2.6",
"crypto-js": "^4.2.0",
@@ -47,11 +47,11 @@
"json-diff": "^1.0.3",
"json-size": "^1.0.0",
"jsontoxml": "^1.0.1",
- "koa": "^2.16.2",
+ "koa": "^3.0.3",
"koa-bodyparser": "^4.4.0",
"koa2-swagger-ui": "^5.7.0",
"libphonenumber-js": "^1.12.15",
- "lodash": "^4.17.21",
+ "lodash": "^4.17.23",
"lru-cache": "^11.1.0",
"match-json": "^1.3.5",
"md5": "^2.3.0",
@@ -64,7 +64,7 @@
"object-hash": "^3.0.0",
"parse-static-imports": "^1.1.0",
"prom-client": "^15.1.3",
- "qs": "^6.11.1",
+ "qs": "^6.14.1",
"rs-jsonpath": "^1.1.2",
"set-value": "^4.1.0",
"sha256": "^0.2.0",
@@ -75,7 +75,7 @@
"unset-value": "^2.0.1",
"uuid": "^11.1.0",
"valid-url": "^1.0.9",
- "validator": "^13.12.0",
+ "validator": "^13.15.22",
"zod": "^3.22.4"
},
"devDependencies": {
@@ -108,7 +108,7 @@
"eslint-plugin-prettier": "^5.2.1",
"eslint-plugin-sonarjs": "^0.19.0",
"eslint-plugin-unicorn": "^46.0.1",
- "glob": "^10.3.3",
+ "glob": "^10.5.0",
"http-terminator": "^3.2.0",
"husky": "^9.1.6",
"jest": "^29.5.0",
@@ -116,7 +116,7 @@
"jest-environment-node": "^29.7.0",
"jest-sonar": "^0.2.16",
"jest-when": "^3.5.2",
- "lint-staged": "^15.5.0",
+ "lint-staged": "^16.2.7",
"madge": "^6.1.0",
"mocked-env": "^1.3.5",
"node-notifier": "^10.0.1",
@@ -282,7 +282,9 @@
}
},
"node_modules/@apidevtools/swagger-cli/node_modules/js-yaml": {
- "version": "3.14.1",
+ "version": "3.14.2",
+ "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.2.tgz",
+ "integrity": "sha512-PMSmkqxr106Xa156c2M265Z+FTrPl+oxd/rgOQy2tijQeK5TxQ43psO1ZCwhVOSdnn+RzkzlRz/eY4BgJBYVpg==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -2599,7 +2601,9 @@
}
},
"node_modules/@istanbuljs/load-nyc-config/node_modules/js-yaml": {
- "version": "3.14.1",
+ "version": "3.14.2",
+ "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.2.tgz",
+ "integrity": "sha512-PMSmkqxr106Xa156c2M265Z+FTrPl+oxd/rgOQy2tijQeK5TxQ43psO1ZCwhVOSdnn+RzkzlRz/eY4BgJBYVpg==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -3445,9 +3449,9 @@
}
},
"node_modules/@rudderstack/integrations-lib": {
- "version": "0.2.59",
- "resolved": "https://registry.npmjs.org/@rudderstack/integrations-lib/-/integrations-lib-0.2.59.tgz",
- "integrity": "sha512-D8n5oQmTcf7chq+d/D6COar8uTlCOiVMdkuvQ2yeiCmjVzvrge+Zh809yDM+aRqh/S1V6VFuY/LBaqWIoLkqbw==",
+ "version": "0.2.63",
+ "resolved": "https://registry.npmjs.org/@rudderstack/integrations-lib/-/integrations-lib-0.2.63.tgz",
+ "integrity": "sha512-JcUKQLppSADTHnLGNmMy27gbKWQwe6kYglWZmZB1WOhQv0nVf5yeZoM2PodIHzBc1PXDGBUUpQAviOdtb9pWBw==",
"license": "MIT",
"dependencies": {
"@rudderstack/featureflag-sdk-node": "^1.0.1",
@@ -3739,6 +3743,83 @@
"node": ">=18.12.0"
}
},
+ "node_modules/@shopify/jest-koa-mocks/node_modules/encodeurl": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz",
+ "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.8"
+ }
+ },
+ "node_modules/@shopify/jest-koa-mocks/node_modules/http-errors": {
+ "version": "1.8.1",
+ "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.8.1.tgz",
+ "integrity": "sha512-Kpk9Sm7NmI+RHhnj6OIWDI1d6fIoFAtFt9RLaTMRlg/8w49juAStsrBgp0Dp4OdxdVbRIeKhtCUvoi/RuAhO4g==",
+ "license": "MIT",
+ "dependencies": {
+ "depd": "~1.1.2",
+ "inherits": "2.0.4",
+ "setprototypeof": "1.2.0",
+ "statuses": ">= 1.5.0 < 2",
+ "toidentifier": "1.0.1"
+ },
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/@shopify/jest-koa-mocks/node_modules/http-errors/node_modules/depd": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz",
+ "integrity": "sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
+ "node_modules/@shopify/jest-koa-mocks/node_modules/koa": {
+ "version": "2.16.3",
+ "resolved": "https://registry.npmjs.org/koa/-/koa-2.16.3.tgz",
+ "integrity": "sha512-zPPuIt+ku1iCpFBRwseMcPYQ1cJL8l60rSmKeOuGfOXyE6YnTBmf2aEFNL2HQGrD0cPcLO/t+v9RTgC+fwEh/g==",
+ "license": "MIT",
+ "dependencies": {
+ "accepts": "^1.3.5",
+ "cache-content-type": "^1.0.0",
+ "content-disposition": "~0.5.2",
+ "content-type": "^1.0.4",
+ "cookies": "~0.9.0",
+ "debug": "^4.3.2",
+ "delegates": "^1.0.0",
+ "depd": "^2.0.0",
+ "destroy": "^1.0.4",
+ "encodeurl": "^1.0.2",
+ "escape-html": "^1.0.3",
+ "fresh": "~0.5.2",
+ "http-assert": "^1.3.0",
+ "http-errors": "^1.6.3",
+ "is-generator-function": "^1.0.7",
+ "koa-compose": "^4.1.0",
+ "koa-convert": "^2.0.0",
+ "on-finished": "^2.3.0",
+ "only": "~0.0.2",
+ "parseurl": "^1.3.2",
+ "statuses": "^1.5.0",
+ "type-is": "^1.6.16",
+ "vary": "^1.1.2"
+ },
+ "engines": {
+ "node": "^4.8.4 || ^6.10.1 || ^7.10.1 || >= 8.1.4"
+ }
+ },
+ "node_modules/@shopify/jest-koa-mocks/node_modules/statuses": {
+ "version": "1.5.0",
+ "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz",
+ "integrity": "sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA==",
+ "license": "MIT",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
"node_modules/@sinclair/typebox": {
"version": "0.27.8",
"dev": true,
@@ -3801,13 +3882,16 @@
}
},
"node_modules/@smithy/config-resolver": {
- "version": "4.0.1",
+ "version": "4.4.5",
+ "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-4.4.5.tgz",
+ "integrity": "sha512-HAGoUAFYsUkoSckuKbCPayECeMim8pOu+yLy1zOxt1sifzEbrsRpYa+mKcMdiHKMeiqOibyPG0sFJnmaV/OGEg==",
"license": "Apache-2.0",
"dependencies": {
- "@smithy/node-config-provider": "^4.0.1",
- "@smithy/types": "^4.1.0",
- "@smithy/util-config-provider": "^4.0.0",
- "@smithy/util-middleware": "^4.0.1",
+ "@smithy/node-config-provider": "^4.3.7",
+ "@smithy/types": "^4.11.0",
+ "@smithy/util-config-provider": "^4.2.0",
+ "@smithy/util-endpoints": "^3.2.7",
+ "@smithy/util-middleware": "^4.2.7",
"tslib": "^2.6.2"
},
"engines": {
@@ -4071,12 +4155,14 @@
}
},
"node_modules/@smithy/node-config-provider": {
- "version": "4.0.1",
+ "version": "4.3.7",
+ "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.3.7.tgz",
+ "integrity": "sha512-7r58wq8sdOcrwWe+klL9y3bc4GW1gnlfnFOuL7CXa7UzfhzhxKuzNdtqgzmTV+53lEp9NXh5hY/S4UgjLOzPfw==",
"license": "Apache-2.0",
"dependencies": {
- "@smithy/property-provider": "^4.0.1",
- "@smithy/shared-ini-file-loader": "^4.0.1",
- "@smithy/types": "^4.1.0",
+ "@smithy/property-provider": "^4.2.7",
+ "@smithy/shared-ini-file-loader": "^4.4.2",
+ "@smithy/types": "^4.11.0",
"tslib": "^2.6.2"
},
"engines": {
@@ -4098,10 +4184,12 @@
}
},
"node_modules/@smithy/property-provider": {
- "version": "4.0.1",
+ "version": "4.2.7",
+ "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.2.7.tgz",
+ "integrity": "sha512-jmNYKe9MGGPoSl/D7JDDs1C8b3dC8f/w78LbaVfoTtWy4xAd5dfjaFG9c9PWPihY4ggMQNQSMtzU77CNgAJwmA==",
"license": "Apache-2.0",
"dependencies": {
- "@smithy/types": "^4.1.0",
+ "@smithy/types": "^4.11.0",
"tslib": "^2.6.2"
},
"engines": {
@@ -4153,10 +4241,12 @@
}
},
"node_modules/@smithy/shared-ini-file-loader": {
- "version": "4.0.1",
+ "version": "4.4.2",
+ "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.4.2.tgz",
+ "integrity": "sha512-M7iUUff/KwfNunmrgtqBfvZSzh3bmFgv/j/t1Y1dQ+8dNo34br1cqVEqy6v0mYEgi0DkGO7Xig0AnuOaEGVlcg==",
"license": "Apache-2.0",
"dependencies": {
- "@smithy/types": "^4.1.0",
+ "@smithy/types": "^4.11.0",
"tslib": "^2.6.2"
},
"engines": {
@@ -4197,7 +4287,9 @@
}
},
"node_modules/@smithy/types": {
- "version": "4.1.0",
+ "version": "4.11.0",
+ "resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.11.0.tgz",
+ "integrity": "sha512-mlrmL0DRDVe3mNrjTcVcZEgkFmufITfUAPBEA+AHYiIeYyJebso/He1qLbP3PssRe22KUzLRpQSdBPbXdgZ2VA==",
"license": "Apache-2.0",
"dependencies": {
"tslib": "^2.6.2"
@@ -4262,7 +4354,9 @@
}
},
"node_modules/@smithy/util-config-provider": {
- "version": "4.0.0",
+ "version": "4.2.0",
+ "resolved": "https://registry.npmjs.org/@smithy/util-config-provider/-/util-config-provider-4.2.0.tgz",
+ "integrity": "sha512-YEjpl6XJ36FTKmD+kRJJWYvrHeUvm5ykaUS5xK+6oXffQPHeEM4/nXlZPe+Wu0lsgRUcNZiliYNh/y7q9c2y6Q==",
"license": "Apache-2.0",
"dependencies": {
"tslib": "^2.6.2"
@@ -4302,11 +4396,13 @@
}
},
"node_modules/@smithy/util-endpoints": {
- "version": "3.0.1",
+ "version": "3.2.7",
+ "resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-3.2.7.tgz",
+ "integrity": "sha512-s4ILhyAvVqhMDYREeTS68R43B1V5aenV5q/V1QpRQJkCXib5BPRo4s7uNdzGtIKxaPHCfU/8YkvPAEvTpxgspg==",
"license": "Apache-2.0",
"dependencies": {
- "@smithy/node-config-provider": "^4.0.1",
- "@smithy/types": "^4.1.0",
+ "@smithy/node-config-provider": "^4.3.7",
+ "@smithy/types": "^4.11.0",
"tslib": "^2.6.2"
},
"engines": {
@@ -4324,10 +4420,12 @@
}
},
"node_modules/@smithy/util-middleware": {
- "version": "4.0.1",
+ "version": "4.2.7",
+ "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-4.2.7.tgz",
+ "integrity": "sha512-i1IkpbOae6NvIKsEeLLM9/2q4X+M90KV3oCFgWQI4q0Qz+yUZvsr+gZPdAEAtFhWQhAHpTsJO8DRJPuwVyln+w==",
"license": "Apache-2.0",
"dependencies": {
- "@smithy/types": "^4.1.0",
+ "@smithy/types": "^4.11.0",
"tslib": "^2.6.2"
},
"engines": {
@@ -5605,7 +5703,9 @@
"license": "MIT"
},
"node_modules/axios": {
- "version": "1.11.0",
+ "version": "1.13.2",
+ "resolved": "https://registry.npmjs.org/axios/-/axios-1.13.2.tgz",
+ "integrity": "sha512-VPk9ebNqPcy5lRGuSlKx752IlDatOjT9paPlm8A7yOuW2Fbvp4X3JznJtT4f0GzGLLiWE9W8onz51SqLYwzGaA==",
"license": "MIT",
"dependencies": {
"follow-redirects": "^1.15.6",
@@ -6092,6 +6192,8 @@
},
"node_modules/cache-content-type": {
"version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/cache-content-type/-/cache-content-type-1.0.1.tgz",
+ "integrity": "sha512-IKufZ1o4Ut42YUrZSo8+qnMTrFuKkvyoLXUywKz9GJ5BrhOFGhLdkx9sG4KAnVvbY6kEcSFjLQul+DVmBm2bgA==",
"license": "MIT",
"dependencies": {
"mime-types": "^2.1.18",
@@ -6391,22 +6493,26 @@
}
},
"node_modules/cli-truncate": {
- "version": "4.0.0",
+ "version": "5.1.1",
+ "resolved": "https://registry.npmjs.org/cli-truncate/-/cli-truncate-5.1.1.tgz",
+ "integrity": "sha512-SroPvNHxUnk+vIW/dOSfNqdy1sPEFkrTk6TUtqLCnBlo3N7TNYYkzzN7uSD6+jVjrdO4+p8nH7JzH6cIvUem6A==",
"dev": true,
"license": "MIT",
"dependencies": {
- "slice-ansi": "^5.0.0",
- "string-width": "^7.0.0"
+ "slice-ansi": "^7.1.0",
+ "string-width": "^8.0.0"
},
"engines": {
- "node": ">=18"
+ "node": ">=20"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/cli-truncate/node_modules/ansi-regex": {
- "version": "6.1.0",
+ "version": "6.2.2",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz",
+ "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==",
"dev": true,
"license": "MIT",
"engines": {
@@ -6416,29 +6522,27 @@
"url": "https://github.com/chalk/ansi-regex?sponsor=1"
}
},
- "node_modules/cli-truncate/node_modules/emoji-regex": {
- "version": "10.4.0",
- "dev": true,
- "license": "MIT"
- },
"node_modules/cli-truncate/node_modules/string-width": {
- "version": "7.2.0",
+ "version": "8.1.0",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-8.1.0.tgz",
+ "integrity": "sha512-Kxl3KJGb/gxkaUMOjRsQ8IrXiGW75O4E3RPjFIINOVH8AMl2SQ/yWdTzWwF3FevIX9LcMAjJW+GRwAlAbTSXdg==",
"dev": true,
"license": "MIT",
"dependencies": {
- "emoji-regex": "^10.3.0",
- "get-east-asian-width": "^1.0.0",
+ "get-east-asian-width": "^1.3.0",
"strip-ansi": "^7.1.0"
},
"engines": {
- "node": ">=18"
+ "node": ">=20"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/cli-truncate/node_modules/strip-ansi": {
- "version": "7.1.0",
+ "version": "7.1.2",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz",
+ "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -6588,6 +6692,8 @@
},
"node_modules/colorette": {
"version": "2.0.20",
+ "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz",
+ "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==",
"dev": true,
"license": "MIT"
},
@@ -6680,6 +6786,13 @@
"url": "https://github.com/sponsors/isaacs"
}
},
+ "node_modules/commitizen/node_modules/lodash": {
+ "version": "4.17.21",
+ "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
+ "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==",
+ "dev": true,
+ "license": "MIT"
+ },
"node_modules/commitlint": {
"version": "19.8.0",
"dev": true,
@@ -8530,7 +8643,9 @@
"license": "MIT"
},
"node_modules/encodeurl": {
- "version": "1.0.2",
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz",
+ "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==",
"license": "MIT",
"engines": {
"node": ">= 0.8"
@@ -8576,6 +8691,8 @@
},
"node_modules/environment": {
"version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/environment/-/environment-1.1.0.tgz",
+ "integrity": "sha512-xUtoPkMggbz0MPyPiIWr1Kp4aeWJjDZ6SMvURhimjdZgsRuDplF5/s9hcgGhyXMhs+6vpnuoiZ2kFiu3FMnS8Q==",
"dev": true,
"license": "MIT",
"engines": {
@@ -9989,7 +10106,9 @@
}
},
"node_modules/get-east-asian-width": {
- "version": "1.3.0",
+ "version": "1.4.0",
+ "resolved": "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.4.0.tgz",
+ "integrity": "sha512-QZjmEOC+IT1uk6Rx0sX22V6uHWVwbdbxf1faPqJ1QhLdGgsRGCZoyaQBm/piRdJy/D2um6hM1UP7ZEeQ4EkP+Q==",
"dev": true,
"license": "MIT",
"engines": {
@@ -10482,7 +10601,9 @@
"license": "MIT"
},
"node_modules/glob": {
- "version": "10.4.5",
+ "version": "10.5.0",
+ "resolved": "https://registry.npmjs.org/glob/-/glob-10.5.0.tgz",
+ "integrity": "sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg==",
"dev": true,
"license": "ISC",
"dependencies": {
@@ -11529,11 +11650,16 @@
}
},
"node_modules/is-fullwidth-code-point": {
- "version": "4.0.0",
+ "version": "5.1.0",
+ "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-5.1.0.tgz",
+ "integrity": "sha512-5XHYaSyiqADb4RnZ1Bdad6cPp8Toise4TzEjcOYDHZkTCbKgiUl7WTUCpNWHuxmDt91wnsZBc9xinNzopv3JMQ==",
"dev": true,
"license": "MIT",
+ "dependencies": {
+ "get-east-asian-width": "^1.3.1"
+ },
"engines": {
- "node": ">=12"
+ "node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
@@ -13609,7 +13735,9 @@
"license": "MIT"
},
"node_modules/js-yaml": {
- "version": "4.1.0",
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz",
+ "integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==",
"license": "MIT",
"dependencies": {
"argparse": "^2.0.1"
@@ -13943,35 +14071,32 @@
}
},
"node_modules/koa": {
- "version": "2.16.2",
+ "version": "3.0.3",
+ "resolved": "https://registry.npmjs.org/koa/-/koa-3.0.3.tgz",
+ "integrity": "sha512-MeuwbCoN1daWS32/Ni5qkzmrOtQO2qrnfdxDHjrm6s4b59yG4nexAJ0pTEFyzjLp0pBVO80CZp0vW8Ze30Ebow==",
"license": "MIT",
"dependencies": {
- "accepts": "^1.3.5",
- "cache-content-type": "^1.0.0",
- "content-disposition": "~0.5.2",
- "content-type": "^1.0.4",
- "cookies": "~0.9.0",
- "debug": "^4.3.2",
+ "accepts": "^1.3.8",
+ "content-disposition": "~0.5.4",
+ "content-type": "^1.0.5",
+ "cookies": "~0.9.1",
"delegates": "^1.0.0",
- "depd": "^2.0.0",
- "destroy": "^1.0.4",
- "encodeurl": "^1.0.2",
+ "destroy": "^1.2.0",
+ "encodeurl": "^2.0.0",
"escape-html": "^1.0.3",
"fresh": "~0.5.2",
- "http-assert": "^1.3.0",
- "http-errors": "^1.6.3",
- "is-generator-function": "^1.0.7",
+ "http-assert": "^1.5.0",
+ "http-errors": "^2.0.0",
"koa-compose": "^4.1.0",
- "koa-convert": "^2.0.0",
- "on-finished": "^2.3.0",
- "only": "~0.0.2",
- "parseurl": "^1.3.2",
- "statuses": "^1.5.0",
- "type-is": "^1.6.16",
+ "mime-types": "^3.0.1",
+ "on-finished": "^2.4.1",
+ "parseurl": "^1.3.3",
+ "statuses": "^2.0.1",
+ "type-is": "^2.0.1",
"vary": "^1.1.2"
},
"engines": {
- "node": "^4.8.4 || ^6.10.1 || ^7.10.1 || >= 8.1.4"
+ "node": ">= 18"
}
},
"node_modules/koa-bodyparser": {
@@ -13992,6 +14117,8 @@
},
"node_modules/koa-convert": {
"version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/koa-convert/-/koa-convert-2.0.0.tgz",
+ "integrity": "sha512-asOvN6bFlSnxewce2e/DK3p4tltyfC4VM7ZwuTuepI7dEQVcvpyFuBcEARu1+Hxg8DIwytce2n7jrZtRlPrARA==",
"license": "MIT",
"dependencies": {
"co": "^4.6.0",
@@ -14001,30 +14128,50 @@
"node": ">= 10"
}
},
- "node_modules/koa/node_modules/http-errors": {
- "version": "1.8.1",
+ "node_modules/koa/node_modules/media-typer": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-1.1.0.tgz",
+ "integrity": "sha512-aisnrDP4GNe06UcKFnV5bfMNPBUw4jsLGaWwWfnH3v02GnBuXX2MCVn5RbrWo0j3pczUilYblq7fQ7Nw2t5XKw==",
"license": "MIT",
- "dependencies": {
- "depd": "~1.1.2",
- "inherits": "2.0.4",
- "setprototypeof": "1.2.0",
- "statuses": ">= 1.5.0 < 2",
- "toidentifier": "1.0.1"
- },
"engines": {
- "node": ">= 0.6"
+ "node": ">= 0.8"
}
},
- "node_modules/koa/node_modules/http-errors/node_modules/depd": {
- "version": "1.1.2",
+ "node_modules/koa/node_modules/mime-db": {
+ "version": "1.54.0",
+ "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz",
+ "integrity": "sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==",
"license": "MIT",
"engines": {
"node": ">= 0.6"
}
},
- "node_modules/koa/node_modules/statuses": {
- "version": "1.5.0",
+ "node_modules/koa/node_modules/mime-types": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.2.tgz",
+ "integrity": "sha512-Lbgzdk0h4juoQ9fCKXW4by0UJqj+nOOrI9MJ1sSj4nI8aI2eo1qmvQEie4VD1glsS250n15LsWsYtCugiStS5A==",
"license": "MIT",
+ "dependencies": {
+ "mime-db": "^1.54.0"
+ },
+ "engines": {
+ "node": ">=18"
+ },
+ "funding": {
+ "type": "opencollective",
+ "url": "https://opencollective.com/express"
+ }
+ },
+ "node_modules/koa/node_modules/type-is": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/type-is/-/type-is-2.0.1.tgz",
+ "integrity": "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==",
+ "license": "MIT",
+ "dependencies": {
+ "content-type": "^1.0.5",
+ "media-typer": "^1.1.0",
+ "mime-types": "^3.0.0"
+ },
"engines": {
"node": ">= 0.6"
}
@@ -14164,196 +14311,43 @@
"version": "1.12.15",
"license": "MIT"
},
- "node_modules/lilconfig": {
- "version": "3.1.3",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=14"
- },
- "funding": {
- "url": "https://github.com/sponsors/antonk52"
- }
- },
"node_modules/lines-and-columns": {
"version": "1.2.4",
"license": "MIT"
},
"node_modules/lint-staged": {
- "version": "15.5.0",
+ "version": "16.2.7",
+ "resolved": "https://registry.npmjs.org/lint-staged/-/lint-staged-16.2.7.tgz",
+ "integrity": "sha512-lDIj4RnYmK7/kXMya+qJsmkRFkGolciXjrsZ6PC25GdTfWOAWetR0ZbsNXRAj1EHHImRSalc+whZFg56F5DVow==",
"dev": true,
"license": "MIT",
"dependencies": {
- "chalk": "^5.4.1",
- "commander": "^13.1.0",
- "debug": "^4.4.0",
- "execa": "^8.0.1",
- "lilconfig": "^3.1.3",
- "listr2": "^8.2.5",
+ "commander": "^14.0.2",
+ "listr2": "^9.0.5",
"micromatch": "^4.0.8",
+ "nano-spawn": "^2.0.0",
"pidtree": "^0.6.0",
"string-argv": "^0.3.2",
- "yaml": "^2.7.0"
+ "yaml": "^2.8.1"
},
"bin": {
"lint-staged": "bin/lint-staged.js"
},
"engines": {
- "node": ">=18.12.0"
+ "node": ">=20.17"
},
"funding": {
"url": "https://opencollective.com/lint-staged"
}
},
- "node_modules/lint-staged/node_modules/chalk": {
- "version": "5.4.1",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": "^12.17.0 || ^14.13 || >=16.0.0"
- },
- "funding": {
- "url": "https://github.com/chalk/chalk?sponsor=1"
- }
- },
- "node_modules/lint-staged/node_modules/commander": {
- "version": "13.1.0",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=18"
- }
- },
- "node_modules/lint-staged/node_modules/execa": {
- "version": "8.0.1",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "cross-spawn": "^7.0.3",
- "get-stream": "^8.0.1",
- "human-signals": "^5.0.0",
- "is-stream": "^3.0.0",
- "merge-stream": "^2.0.0",
- "npm-run-path": "^5.1.0",
- "onetime": "^6.0.0",
- "signal-exit": "^4.1.0",
- "strip-final-newline": "^3.0.0"
- },
- "engines": {
- "node": ">=16.17"
- },
- "funding": {
- "url": "https://github.com/sindresorhus/execa?sponsor=1"
- }
- },
- "node_modules/lint-staged/node_modules/get-stream": {
- "version": "8.0.1",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=16"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/lint-staged/node_modules/human-signals": {
- "version": "5.0.0",
- "dev": true,
- "license": "Apache-2.0",
- "engines": {
- "node": ">=16.17.0"
- }
- },
- "node_modules/lint-staged/node_modules/is-stream": {
- "version": "3.0.0",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": "^12.20.0 || ^14.13.1 || >=16.0.0"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/lint-staged/node_modules/mimic-fn": {
- "version": "4.0.0",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=12"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/lint-staged/node_modules/npm-run-path": {
- "version": "5.3.0",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "path-key": "^4.0.0"
- },
- "engines": {
- "node": "^12.20.0 || ^14.13.1 || >=16.0.0"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/lint-staged/node_modules/onetime": {
- "version": "6.0.0",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "mimic-fn": "^4.0.0"
- },
- "engines": {
- "node": ">=12"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/lint-staged/node_modules/path-key": {
- "version": "4.0.0",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=12"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
- "node_modules/lint-staged/node_modules/signal-exit": {
- "version": "4.1.0",
- "dev": true,
- "license": "ISC",
- "engines": {
- "node": ">=14"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
- "node_modules/lint-staged/node_modules/strip-final-newline": {
- "version": "3.0.0",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": ">=12"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
"node_modules/listr2": {
- "version": "8.2.5",
+ "version": "9.0.5",
+ "resolved": "https://registry.npmjs.org/listr2/-/listr2-9.0.5.tgz",
+ "integrity": "sha512-ME4Fb83LgEgwNw96RKNvKV4VTLuXfoKudAmm2lP8Kk87KaMK0/Xrx/aAkMWmT8mDb+3MlFDspfbCs7adjRxA2g==",
"dev": true,
"license": "MIT",
"dependencies": {
- "cli-truncate": "^4.0.0",
+ "cli-truncate": "^5.0.0",
"colorette": "^2.0.20",
"eventemitter3": "^5.0.1",
"log-update": "^6.1.0",
@@ -14361,11 +14355,13 @@
"wrap-ansi": "^9.0.0"
},
"engines": {
- "node": ">=18.0.0"
+ "node": ">=20.0.0"
}
},
"node_modules/listr2/node_modules/ansi-regex": {
- "version": "6.1.0",
+ "version": "6.2.2",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz",
+ "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==",
"dev": true,
"license": "MIT",
"engines": {
@@ -14376,7 +14372,9 @@
}
},
"node_modules/listr2/node_modules/ansi-styles": {
- "version": "6.2.1",
+ "version": "6.2.3",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz",
+ "integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==",
"dev": true,
"license": "MIT",
"engines": {
@@ -14387,17 +14385,23 @@
}
},
"node_modules/listr2/node_modules/emoji-regex": {
- "version": "10.4.0",
+ "version": "10.6.0",
+ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.6.0.tgz",
+ "integrity": "sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A==",
"dev": true,
"license": "MIT"
},
"node_modules/listr2/node_modules/eventemitter3": {
- "version": "5.0.1",
+ "version": "5.0.4",
+ "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.4.tgz",
+ "integrity": "sha512-mlsTRyGaPBjPedk6Bvw+aqbsXDtoAyAzm5MO7JgU+yVRyMQ5O8bD4Kcci7BS85f93veegeCPkL8R4GLClnjLFw==",
"dev": true,
"license": "MIT"
},
"node_modules/listr2/node_modules/string-width": {
"version": "7.2.0",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz",
+ "integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -14413,7 +14417,9 @@
}
},
"node_modules/listr2/node_modules/strip-ansi": {
- "version": "7.1.0",
+ "version": "7.1.2",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz",
+ "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -14427,7 +14433,9 @@
}
},
"node_modules/listr2/node_modules/wrap-ansi": {
- "version": "9.0.0",
+ "version": "9.0.2",
+ "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.2.tgz",
+ "integrity": "sha512-42AtmgqjV+X1VpdOfyTGOYRi0/zsoLqtXQckTmqTeybT+BDIbM/Guxo7x3pE2vtpr1ok6xRqM9OpBe+Jyoqyww==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -14498,7 +14506,9 @@
}
},
"node_modules/lodash": {
- "version": "4.17.21",
+ "version": "4.17.23",
+ "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.23.tgz",
+ "integrity": "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==",
"license": "MIT"
},
"node_modules/lodash._reinterpolate": {
@@ -14663,6 +14673,8 @@
},
"node_modules/log-update": {
"version": "6.1.0",
+ "resolved": "https://registry.npmjs.org/log-update/-/log-update-6.1.0.tgz",
+ "integrity": "sha512-9ie8ItPR6tjY5uYJh8K/Zrv/RMZ5VOlOWvtZdEHYSTFKZfIBPQa9tOAEeAWhd+AnIneLJ22w5fjOYtoutpWq5w==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -14680,7 +14692,9 @@
}
},
"node_modules/log-update/node_modules/ansi-escapes": {
- "version": "7.0.0",
+ "version": "7.2.0",
+ "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-7.2.0.tgz",
+ "integrity": "sha512-g6LhBsl+GBPRWGWsBtutpzBYuIIdBkLEvad5C/va/74Db018+5TZiyA26cZJAr3Rft5lprVqOIPxf5Vid6tqAw==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -14694,7 +14708,9 @@
}
},
"node_modules/log-update/node_modules/ansi-regex": {
- "version": "6.1.0",
+ "version": "6.2.2",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz",
+ "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==",
"dev": true,
"license": "MIT",
"engines": {
@@ -14705,7 +14721,9 @@
}
},
"node_modules/log-update/node_modules/ansi-styles": {
- "version": "6.2.1",
+ "version": "6.2.3",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz",
+ "integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==",
"dev": true,
"license": "MIT",
"engines": {
@@ -14717,6 +14735,8 @@
},
"node_modules/log-update/node_modules/cli-cursor": {
"version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-5.0.0.tgz",
+ "integrity": "sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -14730,26 +14750,16 @@
}
},
"node_modules/log-update/node_modules/emoji-regex": {
- "version": "10.4.0",
+ "version": "10.6.0",
+ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.6.0.tgz",
+ "integrity": "sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A==",
"dev": true,
"license": "MIT"
},
- "node_modules/log-update/node_modules/is-fullwidth-code-point": {
- "version": "5.0.0",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "get-east-asian-width": "^1.0.0"
- },
- "engines": {
- "node": ">=18"
- },
- "funding": {
- "url": "https://github.com/sponsors/sindresorhus"
- }
- },
"node_modules/log-update/node_modules/onetime": {
"version": "7.0.0",
+ "resolved": "https://registry.npmjs.org/onetime/-/onetime-7.0.0.tgz",
+ "integrity": "sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -14764,6 +14774,8 @@
},
"node_modules/log-update/node_modules/restore-cursor": {
"version": "5.1.0",
+ "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-5.1.0.tgz",
+ "integrity": "sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -14779,6 +14791,8 @@
},
"node_modules/log-update/node_modules/signal-exit": {
"version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz",
+ "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==",
"dev": true,
"license": "ISC",
"engines": {
@@ -14788,23 +14802,10 @@
"url": "https://github.com/sponsors/isaacs"
}
},
- "node_modules/log-update/node_modules/slice-ansi": {
- "version": "7.1.0",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "ansi-styles": "^6.2.1",
- "is-fullwidth-code-point": "^5.0.0"
- },
- "engines": {
- "node": ">=18"
- },
- "funding": {
- "url": "https://github.com/chalk/slice-ansi?sponsor=1"
- }
- },
"node_modules/log-update/node_modules/string-width": {
"version": "7.2.0",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz",
+ "integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -14820,7 +14821,9 @@
}
},
"node_modules/log-update/node_modules/strip-ansi": {
- "version": "7.1.0",
+ "version": "7.1.2",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz",
+ "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -14834,7 +14837,9 @@
}
},
"node_modules/log-update/node_modules/wrap-ansi": {
- "version": "9.0.0",
+ "version": "9.0.2",
+ "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.2.tgz",
+ "integrity": "sha512-42AtmgqjV+X1VpdOfyTGOYRi0/zsoLqtXQckTmqTeybT+BDIbM/Guxo7x3pE2vtpr1ok6xRqM9OpBe+Jyoqyww==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -15361,6 +15366,8 @@
},
"node_modules/mimic-function": {
"version": "5.0.1",
+ "resolved": "https://registry.npmjs.org/mimic-function/-/mimic-function-5.0.1.tgz",
+ "integrity": "sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA==",
"dev": true,
"license": "MIT",
"engines": {
@@ -15744,6 +15751,19 @@
"version": "0.0.8",
"license": "ISC"
},
+ "node_modules/nano-spawn": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/nano-spawn/-/nano-spawn-2.0.0.tgz",
+ "integrity": "sha512-tacvGzUY5o2D8CBh2rrwxyNojUsZNU2zjNTzKQrkgGJQTbGAfArVWXSKMBokBeeg6C7OLRGUEyoFlYbfeWQIqw==",
+ "dev": true,
+ "license": "MIT",
+ "engines": {
+ "node": ">=20.17"
+ },
+ "funding": {
+ "url": "https://github.com/sindresorhus/nano-spawn?sponsor=1"
+ }
+ },
"node_modules/nanoid": {
"version": "3.3.8",
"dev": true,
@@ -16181,7 +16201,9 @@
}
},
"node_modules/only": {
- "version": "0.0.2"
+ "version": "0.0.2",
+ "resolved": "https://registry.npmjs.org/only/-/only-0.0.2.tgz",
+ "integrity": "sha512-Fvw+Jemq5fjjyWz6CpKx6w9s7xxqo3+JCyM0WXWeCSOboZ8ABkyvP8ID4CZuChA/wxSx+XSJmdOm8rGVyJ1hdQ=="
},
"node_modules/open": {
"version": "7.4.2",
@@ -17281,7 +17303,9 @@
}
},
"node_modules/qs": {
- "version": "6.14.0",
+ "version": "6.14.1",
+ "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.1.tgz",
+ "integrity": "sha512-4EK3+xJl8Ts67nLYNwqw/dsFVnCf+qR7RgXSK9jEEm9unao3njwMDdmsdvoKBKHzxd7tCYz5e5M+SnMjdtXGQQ==",
"license": "BSD-3-Clause",
"dependencies": {
"side-channel": "^1.1.0"
@@ -17794,6 +17818,8 @@
},
"node_modules/rfdc": {
"version": "1.4.1",
+ "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.4.1.tgz",
+ "integrity": "sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==",
"dev": true,
"license": "MIT"
},
@@ -18306,22 +18332,26 @@
}
},
"node_modules/slice-ansi": {
- "version": "5.0.0",
+ "version": "7.1.2",
+ "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-7.1.2.tgz",
+ "integrity": "sha512-iOBWFgUX7caIZiuutICxVgX1SdxwAVFFKwt1EvMYYec/NWO5meOJ6K5uQxhrYBdQJne4KxiqZc+KptFOWFSI9w==",
"dev": true,
"license": "MIT",
"dependencies": {
- "ansi-styles": "^6.0.0",
- "is-fullwidth-code-point": "^4.0.0"
+ "ansi-styles": "^6.2.1",
+ "is-fullwidth-code-point": "^5.0.0"
},
"engines": {
- "node": ">=12"
+ "node": ">=18"
},
"funding": {
"url": "https://github.com/chalk/slice-ansi?sponsor=1"
}
},
"node_modules/slice-ansi/node_modules/ansi-styles": {
- "version": "6.2.1",
+ "version": "6.2.3",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz",
+ "integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==",
"dev": true,
"license": "MIT",
"engines": {
@@ -19263,7 +19293,9 @@
}
},
"node_modules/tar-fs": {
- "version": "2.1.3",
+ "version": "2.1.4",
+ "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.4.tgz",
+ "integrity": "sha512-mDAjwmZdh7LTT6pNleZ05Yt65HC3E+NiQzl672vQG38jIrehtJk/J3mNwIg+vShQPcLF/LV7CMnDW6vjj6sfYQ==",
"license": "MIT",
"dependencies": {
"chownr": "^1.1.1",
@@ -19985,7 +20017,9 @@
}
},
"node_modules/validator": {
- "version": "13.12.0",
+ "version": "13.15.22",
+ "resolved": "https://registry.npmjs.org/validator/-/validator-13.15.22.tgz",
+ "integrity": "sha512-uT/YQjiyLJP7HSrv/dPZqK9L28xf8hsNca01HSz1dfmI0DgMfjopp1rO/z13NeGF1tVystF0Ejx3y4rUKPw+bQ==",
"license": "MIT",
"engines": {
"node": ">= 0.10"
@@ -20425,13 +20459,18 @@
"license": "ISC"
},
"node_modules/yaml": {
- "version": "2.7.0",
+ "version": "2.8.2",
+ "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.2.tgz",
+ "integrity": "sha512-mplynKqc1C2hTVYxd0PU2xQAc22TI1vShAYGksCCfxbn/dFwnHTNi1bvYsBTkhdUNtGIf5xNOg938rrSSYvS9A==",
"license": "ISC",
"bin": {
"yaml": "bin.mjs"
},
"engines": {
- "node": ">= 14"
+ "node": ">= 14.6"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/eemeli"
}
},
"node_modules/yargs": {
@@ -20469,6 +20508,8 @@
},
"node_modules/ylru": {
"version": "1.4.0",
+ "resolved": "https://registry.npmjs.org/ylru/-/ylru-1.4.0.tgz",
+ "integrity": "sha512-2OQsPNEmBCvXuFlIni/a+Rn+R2pHW9INm0BxXJ4hVDA8TirqMj+J/Rp9ItLatT/5pZqWwefVrTQcHpixsxnVlA==",
"license": "MIT",
"engines": {
"node": ">= 4.0.0"
diff --git a/package.json b/package.json
index 7e32fab7e6b..6d263efcd31 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "rudder-transformer",
- "version": "1.118.0",
+ "version": "1.125.0",
"description": "",
"homepage": "https://github.com/rudderlabs/rudder-transformer#readme",
"bugs": {
@@ -68,7 +68,7 @@
"@koa/router": "^12.0.0",
"@ndhoule/extend": "^2.0.0",
"@pyroscope/nodejs": "^0.4.5",
- "@rudderstack/integrations-lib": "^0.2.59",
+ "@rudderstack/integrations-lib": "^0.2.63",
"@rudderstack/json-template-engine": "^0.19.5",
"@rudderstack/workflow-engine": "^0.9.0",
"@shopify/jest-koa-mocks": "^5.1.1",
@@ -76,7 +76,7 @@
"ajv-draft-04": "^1.0.0",
"ajv-formats": "^2.1.1",
"amazon-dsp-formatter": "^1.0.2",
- "axios": "^1.7.9",
+ "axios": "^1.13.2",
"btoa": "^1.2.1",
"component-each": "^0.2.6",
"crypto-js": "^4.2.0",
@@ -98,11 +98,11 @@
"json-diff": "^1.0.3",
"json-size": "^1.0.0",
"jsontoxml": "^1.0.1",
- "koa": "^2.16.2",
+ "koa": "^3.0.3",
"koa-bodyparser": "^4.4.0",
"koa2-swagger-ui": "^5.7.0",
"libphonenumber-js": "^1.12.15",
- "lodash": "^4.17.21",
+ "lodash": "^4.17.23",
"lru-cache": "^11.1.0",
"match-json": "^1.3.5",
"md5": "^2.3.0",
@@ -115,7 +115,7 @@
"object-hash": "^3.0.0",
"parse-static-imports": "^1.1.0",
"prom-client": "^15.1.3",
- "qs": "^6.11.1",
+ "qs": "^6.14.1",
"rs-jsonpath": "^1.1.2",
"set-value": "^4.1.0",
"sha256": "^0.2.0",
@@ -126,7 +126,7 @@
"unset-value": "^2.0.1",
"uuid": "^11.1.0",
"valid-url": "^1.0.9",
- "validator": "^13.12.0",
+ "validator": "^13.15.22",
"zod": "^3.22.4"
},
"devDependencies": {
@@ -159,7 +159,7 @@
"eslint-plugin-prettier": "^5.2.1",
"eslint-plugin-sonarjs": "^0.19.0",
"eslint-plugin-unicorn": "^46.0.1",
- "glob": "^10.3.3",
+ "glob": "^10.5.0",
"http-terminator": "^3.2.0",
"husky": "^9.1.6",
"jest": "^29.5.0",
@@ -167,7 +167,7 @@
"jest-environment-node": "^29.7.0",
"jest-sonar": "^0.2.16",
"jest-when": "^3.5.2",
- "lint-staged": "^15.5.0",
+ "lint-staged": "^16.2.7",
"madge": "^6.1.0",
"mocked-env": "^1.3.5",
"node-notifier": "^10.0.1",
diff --git a/src/adapters/networkHandlerFactory.test.js b/src/adapters/networkHandlerFactory.test.js
deleted file mode 100644
index e2b4a231e13..00000000000
--- a/src/adapters/networkHandlerFactory.test.js
+++ /dev/null
@@ -1,34 +0,0 @@
-const { getNetworkHandler } = require('./networkHandlerFactory');
-const { networkHandler: GenericNetworkHandler } = require('./networkhandler/genericNetworkHandler');
-
-describe(`Network Handler Tests`, () => {
- it('Should return v0 networkhandler', () => {
- let { networkHandler, handlerVersion } = getNetworkHandler('campaign_manager', `v0`);
- const cmProxy = require(`../v0/destinations/campaign_manager/networkHandler`).networkHandler;
- expect(networkHandler).toEqual(new cmProxy());
- });
-
- it('Should return v0 networkhandler braze', () => {
- let { networkHandler, handlerVersion } = getNetworkHandler('braze', `v0`);
- const brazeProxy = require(`../v0/destinations/braze/networkHandler`).networkHandler;
- expect(networkHandler).toEqual(new brazeProxy());
- });
-
- it('Should return v1 networkhandler', () => {
- let { networkHandler, handlerVersion } = getNetworkHandler('campaign_manager', `v1`);
- const cmProxy = require(`../v1/destinations/campaign_manager/networkHandler`).networkHandler;
- expect(networkHandler).toEqual(new cmProxy());
- });
-
- it('Should return v0 handler if v1 version and handler is present for destination in v0', () => {
- const { networkHandler, handlerVersion } = getNetworkHandler('braze', `v1`);
- const brazeProxy = require(`../v0/destinations/braze/networkHandler`).networkHandler;
- expect(networkHandler).toEqual(new brazeProxy());
- });
-
- it('Should return generic handler', () => {
- const { networkHandler, handlerVersion } = getNetworkHandler('abc', `v1`);
- const brazeProxy = require(`../v0/destinations/braze/networkHandler`).networkHandler;
- expect(networkHandler).toEqual(new GenericNetworkHandler());
- });
-});
diff --git a/src/adapters/networkHandlerFactory.test.ts b/src/adapters/networkHandlerFactory.test.ts
new file mode 100644
index 00000000000..ff875cce867
--- /dev/null
+++ b/src/adapters/networkHandlerFactory.test.ts
@@ -0,0 +1,33 @@
+import { getNetworkHandler } from './networkHandlerFactory';
+import { networkHandler as GenericNetworkHandler } from './networkhandler/genericNetworkHandler';
+
+describe('Network Handler Tests', () => {
+ it('Should return v0 networkhandler', () => {
+ const { networkHandler } = getNetworkHandler('campaign_manager', 'v0');
+ const cmProxy = require('../v0/destinations/campaign_manager/networkHandler').networkHandler;
+ expect(networkHandler).toEqual(new cmProxy());
+ });
+
+ it('Should return v0 networkhandler braze', () => {
+ const { networkHandler } = getNetworkHandler('braze', 'v0');
+ const brazeProxy = require('../v0/destinations/braze/networkHandler').networkHandler;
+ expect(networkHandler).toEqual(new brazeProxy());
+ });
+
+ it('Should return v1 networkhandler', () => {
+ const { networkHandler } = getNetworkHandler('campaign_manager', 'v1');
+ const cmProxy = require('../v1/destinations/campaign_manager/networkHandler').networkHandler;
+ expect(networkHandler).toEqual(new cmProxy());
+ });
+
+ it('Should return v0 handler if v1 version and handler is present for destination in v0', () => {
+ const { networkHandler } = getNetworkHandler('braze', 'v1');
+ const brazeProxy = require('../v0/destinations/braze/networkHandler').networkHandler;
+ expect(networkHandler).toEqual(new brazeProxy());
+ });
+
+ it('Should return generic handler', () => {
+ const { networkHandler } = getNetworkHandler('abc', 'v1');
+ expect(networkHandler).toEqual(new GenericNetworkHandler());
+ });
+});
diff --git a/src/cdk/v2/constants/cdkV2DestinationsMap.ts b/src/cdk/v2/constants/cdkV2DestinationsMap.ts
new file mode 100644
index 00000000000..865287f6569
--- /dev/null
+++ b/src/cdk/v2/constants/cdkV2DestinationsMap.ts
@@ -0,0 +1,56 @@
+/**
+ * Transformer-owned list of destination types that are considered "CDK v2 family".
+ */
+export const cdkV2DestinationsMap: Record = {
+ ACCOIL_ANALYTICS: true,
+ ALGOLIA: true,
+ AUTOPILOT: true,
+ BINGADS_AUDIENCE: true,
+ BLOOMREACH: true,
+ BLOOMREACH_CATALOG: true,
+ BLUECORE: true,
+ CLICKSEND: true,
+ CORDIAL: true,
+ DCM_FLOODLIGHT: true,
+ DYNAMIC_YIELD: true,
+ ELOQUA: true,
+ EMARSYS: true,
+ FULLSTORY: true,
+ GLADLY: true,
+ HEAP: true,
+ HTTP: true,
+ INTERCOM: true,
+ KLAVIYO_BULK_UPLOAD: true,
+ KOALA: true,
+ KOCHAVA: true,
+ KODDI: true,
+ LAUNCHDARKLY_AUDIENCE: true,
+ LINKEDIN_ADS: true,
+ LINKEDIN_AUDIENCE: true,
+ LOOPS: true,
+ LYTICS: true,
+ MOVABLE_INK: true,
+ NEW_RELIC: true,
+ NINETAILED: true,
+ OPTIMIZELY_FULLSTACK: true,
+ ORTTO: true,
+ PINTEREST_TAG: true,
+ RAKUTEN: true,
+ REDDIT: true,
+ SMARTLY: true,
+ SPRIG: true,
+ STATSIG: true,
+ THE_TRADE_DESK: true,
+ THE_TRADE_DESK_REAL_TIME_CONVERSIONS: true,
+ TIKTOK_AUDIENCE: true,
+ USERLIST: true,
+ USERPILOT: true,
+ VARIANCE: true,
+ VITALLY: true,
+ WEBHOOK: true,
+ WEBHOOK_V2: true,
+ WUNDERKIND: true,
+ YANDEX_METRICA_OFFLINE_EVENTS: true,
+ ZAPIER: true,
+ ZOHO: true,
+};
diff --git a/src/cdk/v2/destinations/http/procWorkflow.yaml b/src/cdk/v2/destinations/http/procWorkflow.yaml
index 1371eded905..9a9f68d3558 100644
--- a/src/cdk/v2/destinations/http/procWorkflow.yaml
+++ b/src/cdk/v2/destinations/http/procWorkflow.yaml
@@ -46,8 +46,7 @@ steps:
- name: prepareParams
template: |
- const params = $.getCustomMappings(.message, .destination.Config.queryParams);
- $.context.params = $.encodeParamsObject(params);
+ $.context.params = $.validateQueryParams($.getCustomMappings(.message, .destination.Config.queryParams));
- name: deduceEndPoint
template: |
diff --git a/src/cdk/v2/destinations/http/utils.js b/src/cdk/v2/destinations/http/utils.js
index d7b3e8efb22..79772444dcb 100644
--- a/src/cdk/v2/destinations/http/utils.js
+++ b/src/cdk/v2/destinations/http/utils.js
@@ -67,16 +67,12 @@ const getCustomMappings = (message, mapping) => {
}
};
-const encodeParamsObject = (params) => {
- if (!params || typeof params !== 'object') {
+const validateQueryParams = (params) => {
+ if (!params || typeof params !== 'object' || Array.isArray(params)) {
return {}; // Return an empty object if input is null, undefined, or not an object
}
- return Object.keys(params)
- .filter((key) => params[key] !== undefined)
- .reduce((acc, key) => {
- acc[encodeURIComponent(key)] = encodeURIComponent(params[key]);
- return acc;
- }, {});
+ const filteredKeys = Object.keys(params).filter((key) => params[key] !== undefined);
+ return Object.fromEntries(filteredKeys.map((key) => [key, params[key]]));
};
const getPathValueFromJsonpath = (message, path) => {
@@ -253,7 +249,7 @@ module.exports = {
getAuthHeaders,
enhanceMappings,
getCustomMappings,
- encodeParamsObject,
+ validateQueryParams,
prepareEndpoint,
metadataHeaders,
prepareBody,
diff --git a/src/cdk/v2/destinations/http/utils.test.js b/src/cdk/v2/destinations/http/utils.test.js
index 2310b840e78..e0eccb4c6d7 100644
--- a/src/cdk/v2/destinations/http/utils.test.js
+++ b/src/cdk/v2/destinations/http/utils.test.js
@@ -1,6 +1,5 @@
const {
enhanceMappings,
- encodeParamsObject,
prepareEndpoint,
prepareBody,
stringifyFirstLevelValues,
@@ -10,20 +9,6 @@ const { XMLBuilder } = require('fast-xml-parser');
const jsonpath = require('rs-jsonpath');
describe('Utils Functions', () => {
- describe('encodeParamsObject', () => {
- test('should return empty object for invalid inputs', () => {
- expect(encodeParamsObject(null)).toEqual({});
- expect(encodeParamsObject(undefined)).toEqual({});
- expect(encodeParamsObject('string')).toEqual({});
- });
-
- test('should encode object keys and values', () => {
- const params = { key1: 'value1', key2: 'value2 3 4' };
- const expected = { key1: 'value1', key2: 'value2%203%204' };
- expect(encodeParamsObject(params)).toEqual(expected);
- });
- });
-
describe('prepareEndpoint', () => {
test('should preserve single trailing slash when pathParams is empty array', () => {
const message = { id: 123 };
diff --git a/src/cdk/v2/destinations/reddit/utils.test.js b/src/cdk/v2/destinations/reddit/utils.test.js
index d9c7a5bdea6..f1d30083164 100644
--- a/src/cdk/v2/destinations/reddit/utils.test.js
+++ b/src/cdk/v2/destinations/reddit/utils.test.js
@@ -709,20 +709,30 @@ describe('generateAndValidateTimestamp', () => {
// Accepts timestamp exactly 168 hours old
it('should accept timestamp exactly 168 hours old', () => {
- const validOldDate = new Date(Date.now() - 168 * 60 * 60 * 1000).toISOString();
+ const mockNow = 1600000000000; // Fixed timestamp
+ jest.spyOn(Date, 'now').mockReturnValue(mockNow);
+
+ const validOldDate = new Date(mockNow - 168 * 60 * 60 * 1000).toISOString();
const result = generateAndValidateTimestamp(validOldDate);
expect(typeof result).toBe('number');
expect(result).toBeGreaterThan(0);
+
+ Date.now.mockRestore();
});
// Accepts timestamp exactly 5 minutes in the future
it('should accept timestamp exactly 5 minutes in the future', () => {
- const validFutureDate = new Date(Date.now() + 5 * 60 * 1000).toISOString();
+ const mockNow = 1600000000000; // Fixed timestamp
+ jest.spyOn(Date, 'now').mockReturnValue(mockNow);
+
+ const validFutureDate = new Date(mockNow + 5 * 60 * 1000).toISOString();
const result = generateAndValidateTimestamp(validFutureDate);
expect(typeof result).toBe('number');
expect(result).toBeGreaterThan(0);
+
+ Date.now.mockRestore();
});
// Accepts timestamp as Unix milliseconds
diff --git a/src/cdk/v2/handler.ts b/src/cdk/v2/handler.ts
index e3976872305..e3547af0280 100644
--- a/src/cdk/v2/handler.ts
+++ b/src/cdk/v2/handler.ts
@@ -14,7 +14,7 @@ import {
getPlatformBindingsPaths,
getRootPathForDestination,
getWorkflowPath,
- isCdkV2Destination,
+ shouldUseCdkV2,
} from './utils';
import logger from '../../logger';
@@ -93,7 +93,8 @@ export async function processCdkV2Workflow(
const workflowEngine = await getCachedWorkflowEngine(destType, feature, bindings);
return await executeWorkflow(workflowEngine, parsedEvent, requestMetadata);
} catch (error) {
- throw getErrorInfo(error, isCdkV2Destination(parsedEvent), defTags);
+ const workspaceId = parsedEvent.metadata?.workspaceId ?? '';
+ throw getErrorInfo(error, shouldUseCdkV2(destType, workspaceId), defTags);
}
}
diff --git a/src/cdk/v2/utils.test.ts b/src/cdk/v2/utils.test.ts
new file mode 100644
index 00000000000..3782c57597f
--- /dev/null
+++ b/src/cdk/v2/utils.test.ts
@@ -0,0 +1,79 @@
+import { shouldUseCdkV2 } from './utils';
+
+describe('cdk/v2 utils', () => {
+ describe('shouldUseCdkV2', () => {
+ type Case = {
+ name: string;
+ destType: string;
+ workspaceId: string;
+ env?: { key: string; value?: string };
+ expected: boolean;
+ };
+
+ const cases: Case[] = [
+ {
+ name: 'returns false when destination is not CDK v2 enabled',
+ destType: 'some_unknown_destination',
+ workspaceId: 'w1',
+ expected: false,
+ },
+ {
+ name: 'returns true when destination is enabled and env toggle is not set',
+ destType: 'webhook',
+ workspaceId: 'w1',
+ env: { key: 'DISABLE_WEBHOOK_CDK_V2' }, // ensure unset
+ expected: true,
+ },
+ {
+ name: 'returns false when DISABLE__CDK_V2 is ALL',
+ destType: 'WEBHOOK',
+ workspaceId: 'w1',
+ env: { key: 'DISABLE_WEBHOOK_CDK_V2', value: 'ALL' },
+ expected: false,
+ },
+ {
+ name: 'returns true when DISABLE__CDK_V2 is NONE',
+ destType: 'WEBHOOK',
+ workspaceId: 'w1',
+ env: { key: 'DISABLE_WEBHOOK_CDK_V2', value: 'NONE' },
+ expected: true,
+ },
+ {
+ name: 'returns false only for workspaceIds listed in DISABLE__CDK_V2 (blocked)',
+ destType: 'webhook',
+ workspaceId: 'w1',
+ env: { key: 'DISABLE_WEBHOOK_CDK_V2', value: 'w1,w2' },
+ expected: false,
+ },
+ {
+ name: 'returns true only for workspaceIds listed in DISABLE__CDK_V2 (allowed)',
+ destType: 'webhook',
+ workspaceId: 'w3',
+ env: { key: 'DISABLE_WEBHOOK_CDK_V2', value: 'w1,w2' },
+ expected: true,
+ },
+ ];
+
+ test.each(cases)('$name', ({ destType, workspaceId, env, expected }) => {
+ const previousValue = env ? process.env[env.key] : undefined;
+ if (env) {
+ if (env.value === undefined) {
+ delete process.env[env.key];
+ } else {
+ process.env[env.key] = env.value;
+ }
+ }
+ try {
+ expect(shouldUseCdkV2(destType, workspaceId)).toBe(expected);
+ } finally {
+ if (env) {
+ if (previousValue === undefined) {
+ delete process.env[env.key];
+ } else {
+ process.env[env.key] = previousValue;
+ }
+ }
+ }
+ });
+ });
+});
diff --git a/src/cdk/v2/utils.ts b/src/cdk/v2/utils.ts
index 0c3d8b30220..aa16ea16a19 100644
--- a/src/cdk/v2/utils.ts
+++ b/src/cdk/v2/utils.ts
@@ -6,6 +6,7 @@ import logger from '../../logger';
import { generateErrorObject } from '../../v0/util';
import tags from '../../v0/util/tags';
import { CatchErr } from '../../types';
+import { cdkV2DestinationsMap } from './constants/cdkV2DestinationsMap';
const CDK_V2_ROOT_DIR = __dirname;
@@ -109,10 +110,34 @@ export function getErrorInfo(err: CatchErr, isProd: boolean, defTags) {
return generateErrorObject(new PlatformError(message), defTags);
}
-export function isCdkV2Destination(event) {
- return Boolean(event?.destination?.DestinationDefinition?.Config?.cdkV2Enabled);
-}
+export function shouldUseCdkV2(destType: string, workspaceId: string) {
+ const destTypeUpper = destType.toUpperCase();
+
+ // Check if the destination type is in the CDK v2 enabled map
+ if (!cdkV2DestinationsMap[destTypeUpper]) {
+ return false;
+ }
-export function getCdkV2TestThreshold(event) {
- return event.destination?.DestinationDefinition?.Config?.cdkV2TestThreshold || 0;
+ // Check if the destination type is disabled for the environment variable
+ // DISABLE__CDK_V2
+ const envValue = process.env[`DISABLE_${destTypeUpper}_CDK_V2`];
+ if (!envValue) {
+ return true;
+ }
+
+ const normalized = envValue.trim();
+ switch (normalized) {
+ case 'ALL':
+ return false;
+ case 'NONE':
+ return true;
+ default:
+ // Comma-separated allow/deny list of workspaceIds.
+ // If the workspaceId is listed, CDK v2 is disabled for it.
+ return !normalized
+ .split(',')
+ .map((s) => s.trim())
+ .filter(Boolean)
+ .includes(workspaceId);
+ }
}
diff --git a/src/constants/destinationCanonicalNames.js b/src/constants/destinationCanonicalNames.js
index 819ada2aa1b..f457d011f3c 100644
--- a/src/constants/destinationCanonicalNames.js
+++ b/src/constants/destinationCanonicalNames.js
@@ -188,6 +188,7 @@ const DestCanonicalNames = {
cordial: ['cordial', 'Cordial', 'CORDIAL'],
clevertap: ['clevertap', 'Clevertap', 'CleverTap', 'CLEVERTAP'],
airship: ['airship', 'Airship', 'AIRSHIP'],
+ singular: ['Singular'],
};
module.exports = { DestHandlerMap, DestCanonicalNames };
diff --git a/src/controllers/__tests__/source.test.ts b/src/controllers/__tests__/source.test.ts
index a1226f63d6c..1399c841f77 100644
--- a/src/controllers/__tests__/source.test.ts
+++ b/src/controllers/__tests__/source.test.ts
@@ -333,5 +333,84 @@ describe('Source controller tests', () => {
});
});
});
+
+ describe('X-Rudder-Permanent-Error header', () => {
+ const testCases = [
+ {
+ description: 'should set header when job has 400 status code',
+ hydrationOutput: {
+ batch: [{ event: { field: 'value1' }, statusCode: 400, errorMessage: 'Bad Request' }],
+ },
+ expectedHeader: 'true',
+ },
+ {
+ description: 'should set header when job has 404 status code',
+ hydrationOutput: {
+ batch: [{ event: { field: 'value1' }, statusCode: 404, errorMessage: 'Not Found' }],
+ },
+ expectedHeader: 'true',
+ },
+ {
+ description: 'should set header when one of multiple jobs has 4xx error',
+ hydrationOutput: {
+ batch: [
+ { event: { field: 'value1' }, statusCode: 200 },
+ { event: { field: 'value2' }, statusCode: 403, errorMessage: 'Forbidden' },
+ ],
+ },
+ expectedHeader: 'true',
+ },
+ {
+ description: 'should NOT set header when job has 429 status code',
+ hydrationOutput: {
+ batch: [
+ { event: { field: 'value1' }, statusCode: 429, errorMessage: 'Too Many Requests' },
+ ],
+ },
+ expectedHeader: undefined,
+ },
+ {
+ description: 'should NOT set header when all jobs are successful (200)',
+ hydrationOutput: {
+ batch: [{ event: { field: 'value1' }, statusCode: 200 }],
+ },
+ expectedHeader: undefined,
+ },
+ {
+ description: 'should NOT set header when job has 500 status code',
+ hydrationOutput: {
+ batch: [
+ {
+ event: { field: 'value1' },
+ statusCode: 500,
+ errorMessage: 'Internal Server Error',
+ },
+ ],
+ },
+ expectedHeader: undefined,
+ },
+ ];
+
+ testCases.forEach(({ description, hydrationOutput, expectedHeader }) => {
+ test(description, async () => {
+ const mockSourceService = new NativeIntegrationSourceService();
+ mockSourceService.sourceHydrateRoutine = jest.fn().mockResolvedValue(hydrationOutput);
+
+ const getNativeSourceServiceSpy = jest
+ .spyOn(ServiceSelector, 'getNativeSourceService')
+ .mockImplementation(() => mockSourceService);
+
+ const response = await request(server)
+ .post(`/v2/sources/${sourceType}/hydrate`)
+ .set('Accept', 'application/json')
+ .send({ source: { id: 'sourceId' }, batch: [] });
+
+ expect(response.header['x-rudder-permanent-error']).toEqual(expectedHeader);
+
+ expect(getNativeSourceServiceSpy).toHaveBeenCalledTimes(1);
+ expect(mockSourceService.sourceHydrateRoutine).toHaveBeenCalledTimes(1);
+ });
+ });
+ });
});
});
diff --git a/src/controllers/source.ts b/src/controllers/source.ts
index 3fff8228827..556c0212256 100644
--- a/src/controllers/source.ts
+++ b/src/controllers/source.ts
@@ -70,6 +70,19 @@ export class SourceController {
const firstError = response.batch.find(
(job) => job.statusCode >= HTTP_STATUS_CODES.BAD_REQUEST,
);
+
+ // Check if any event has a 4xx status code (except 429)
+ const hasPermanentError = response.batch.some(
+ (job) =>
+ job.statusCode >= 400 &&
+ job.statusCode < 500 &&
+ job.statusCode !== HTTP_STATUS_CODES.TOO_MANY_REQUESTS,
+ );
+
+ if (hasPermanentError) {
+ ctx.set('X-Rudder-Permanent-Error', 'true');
+ }
+
if (firstError) {
// Since server doesn't handle partial success
// no need to return events in case of any error
diff --git a/src/controllers/userTransform.ts b/src/controllers/userTransform.ts
index 3af126a21fc..f7344e532f8 100644
--- a/src/controllers/userTransform.ts
+++ b/src/controllers/userTransform.ts
@@ -12,6 +12,28 @@ import { reconcileFunction } from '../util/openfaas/index';
import { ControllerUtility } from './util';
import logger from '../logger';
+interface Dependencies {
+ libraries: {
+ versionId: string;
+ }[];
+ credentials: {
+ key: string;
+ value: string;
+ isSecret: boolean;
+ }[];
+}
+
+interface TestRunRequestBody {
+ input: { message: Record; metadata?: Record }[];
+ codeRevision: {
+ code: string;
+ language: string;
+ versionId: string;
+ codeVersion?: string;
+ };
+ dependencies?: Dependencies;
+}
+
export class UserTransformController {
/**
reconcileFunction is a controller function to reconcile the openfaas
@@ -53,6 +75,40 @@ export class UserTransformController {
return ctx;
}
+ /**
+ * testRun is a controller function that executes a test run of user-provided transformation code
+ * using the given input data and optional dependencies. This is typically used to validate
+ * transformation logic by running it as a test before deployment.
+ *
+ * Expects the following body structure (TestRunRequestBody):
+ * - input: array of objects representing input records
+ * - code: string containing the transformation code to test
+ * - language: programming language used for the transformation (e.g., 'javascript')
+ * - codeVersion (optional): string representing code version, defaults to '1' if not provided
+ * - dependencies (optional):
+ * - libraries: array of library objects with `versionId` for each library to load in the test environment
+ * - credentials: array of credential objects to provide for the test
+ *
+ * Responds with the result of the test execution.
+ *
+ * @param ctx - The Koa request/response context object.
+ */
+ public static async testRun(ctx: Context) {
+ const { input, codeRevision, dependencies } = ctx.request.body as TestRunRequestBody;
+
+ const response = await UserTransformService.testTransformRoutine(
+ input,
+ { ...codeRevision, codeVersion: codeRevision.codeVersion || '1' },
+ (dependencies?.libraries ?? []).map((library) => library.versionId),
+ dependencies?.credentials ?? [],
+ true,
+ );
+
+ ctx.body = response.body;
+ ControllerUtility.postProcess(ctx, response.status);
+ return ctx;
+ }
+
public static async testTransformLibrary(ctx: Context) {
try {
const { code, language = 'javascript' } = ctx.request.body as any;
diff --git a/src/helpers/__tests__/serviceSelector.test.ts b/src/helpers/__tests__/serviceSelector.test.ts
index e0efff91ec7..85e468d693b 100644
--- a/src/helpers/__tests__/serviceSelector.test.ts
+++ b/src/helpers/__tests__/serviceSelector.test.ts
@@ -26,28 +26,12 @@ describe('ServiceSelector Service', () => {
);
});
- test('isCdkV2Destination should return true', async () => {
- const destinationDefinitionConfig = {
- cdkV2Enabled: true,
- };
- expect(ServiceSelector['isCdkV2Destination'](destinationDefinitionConfig)).toBe(true);
- });
-
- test('isCdkV2Destination should return false', async () => {
- const destinationDefinitionConfig = {
- cdkV2EnabledXYZ: true,
- };
- expect(ServiceSelector['isCdkV2Destination'](destinationDefinitionConfig)).toBe(false);
- });
-
test('getPrimaryDestinationService should return cdk v2 dest service', async () => {
const events = [
{
destination: {
DestinationDefinition: {
- Config: {
- cdkV2Enabled: true,
- },
+ Name: 'WEBHOOK',
},
},
},
diff --git a/src/helpers/serviceSelector.ts b/src/helpers/serviceSelector.ts
index 0d508da6066..962d2af0e2f 100644
--- a/src/helpers/serviceSelector.ts
+++ b/src/helpers/serviceSelector.ts
@@ -5,7 +5,8 @@ import { INTEGRATION_SERVICE } from '../routes/utils/constants';
import { CDKV2DestinationService } from '../services/destination/cdkV2Integration';
import { NativeIntegrationDestinationService } from '../services/destination/nativeIntegration';
import { NativeIntegrationSourceService } from '../services/source/nativeIntegration';
-import { FixMe, ProcessorTransformationRequest, RouterTransformationRequestData } from '../types';
+import { ProcessorTransformationRequest, RouterTransformationRequestData } from '../types';
+import { shouldUseCdkV2 } from '../cdk/v2/utils';
export class ServiceSelector {
private static serviceMap: Map = new Map();
@@ -16,10 +17,6 @@ export class ServiceSelector {
[INTEGRATION_SERVICE.NATIVE_SOURCE]: NativeIntegrationSourceService,
};
- private static isCdkV2Destination(destinationDefinitionConfig: FixMe) {
- return Boolean(destinationDefinitionConfig?.cdkV2Enabled);
- }
-
private static fetchCachedService(serviceType: string) {
if (this.serviceMap.has(serviceType)) {
return this.serviceMap.get(serviceType);
@@ -43,9 +40,9 @@ export class ServiceSelector {
private static getPrimaryDestinationService(
events: ProcessorTransformationRequest[] | RouterTransformationRequestData[],
): DestinationService {
- const destinationDefinitionConfig: FixMe =
- events[0]?.destination?.DestinationDefinition?.Config;
- if (this.isCdkV2Destination(destinationDefinitionConfig)) {
+ const destinationType = events[0]?.destination?.DestinationDefinition?.Name ?? '';
+ const workspaceId = events[0]?.metadata?.workspaceId ?? '';
+ if (shouldUseCdkV2(destinationType, workspaceId)) {
return this.fetchCachedService(INTEGRATION_SERVICE.CDK_V2_DEST);
}
return this.fetchCachedService(INTEGRATION_SERVICE.NATIVE_DEST);
diff --git a/src/routes/userTransform.ts b/src/routes/userTransform.ts
index e2883bdc22f..81db3cee476 100644
--- a/src/routes/userTransform.ts
+++ b/src/routes/userTransform.ts
@@ -24,6 +24,11 @@ router.post(
RouteActivationMiddleware.isUserTransformTestRouteActive,
UserTransformController.testTransform,
);
+router.post(
+ '/transformation/testRun',
+ RouteActivationMiddleware.isUserTransformTestRouteActive,
+ UserTransformController.testRun,
+);
router.post(
'/transformationLibrary/test',
RouteActivationMiddleware.isUserTransformTestRouteActive,
diff --git a/src/services/destination/postTransformation.ts b/src/services/destination/postTransformation.ts
index d443ab4e005..284749e13aa 100644
--- a/src/services/destination/postTransformation.ts
+++ b/src/services/destination/postTransformation.ts
@@ -16,6 +16,7 @@ import {
} from '../../types';
import stats from '../../util/stats';
import { generateErrorObject } from '../../v0/util';
+import { DeleteUsersError } from '../../v0/util/errorTypes/deleteUsersError';
import tags from '../../v0/util/tags';
import { ErrorReportingService } from '../errorReporting';
import logger from '../../logger';
@@ -235,6 +236,13 @@ export class DestinationPostTransformationService {
metaTo: MetaTransferObject,
): UserDeletionResponse {
const errObj = generateErrorObject(error, metaTo.errorDetails, false);
+ // Use logMessage if available to avoid logging PII
+ const logMessage = error instanceof DeleteUsersError ? error.logMessage : errObj.message;
+ logger.error('User deletion failed', {
+ errorMessage: logMessage,
+ destinationId: metaTo.errorDetails.destinationId,
+ destType: metaTo.errorDetails.destType,
+ });
stats.increment('regulation_worker_user_deletion_failure', {
destType: metaTo.errorDetails.destType,
diff --git a/src/services/source/nativeIntegration.ts b/src/services/source/nativeIntegration.ts
index 88d99d16312..f3698e3c618 100644
--- a/src/services/source/nativeIntegration.ts
+++ b/src/services/source/nativeIntegration.ts
@@ -21,7 +21,6 @@ import stats from '../../util/stats';
import tags from '../../v0/util/tags';
import { SourcePostTransformationService } from './postTransformation';
import logger from '../../logger';
-import { getBodyFromV2SpecPayload } from '../../v0/util';
import { HTTP_STATUS_CODES } from '../../v0/util/constant';
const SUPPORTED_HYDRATION_SOURCE_TYPES = ['facebook_lead_ads_native'];
@@ -63,20 +62,20 @@ export class NativeIntegrationSourceService implements SourceService {
stats.increment('source_transform_errors', {
source: sourceType,
});
- logger.debug(`Error during source Transform: ${error}`, {
+ logger.error(`Error during source Transform: ${error}`, {
...logger.getLogMetadata(metaTO.errorDetails),
});
- // log the payload schema here
- const duplicateSourceEvent: any = sourceEvent;
+ const requestCopy = {
+ // Spreading to avoid mutation of the original object
+ ...sourceEvent.request,
+ };
try {
- duplicateSourceEvent.output.request.body = getBodyFromV2SpecPayload(
- duplicateSourceEvent?.output,
- );
+ requestCopy.body = JSON.parse(requestCopy.body);
} catch (e) {
/* empty */
}
logger.error(
- `Sample Payload Schema for source ${sourceType} : ${JSON.stringify(JsonSchemaGenerator.generate(duplicateSourceEvent))}`,
+ `Request schema for source ${sourceType} : ${JSON.stringify(JsonSchemaGenerator.generate(requestCopy))}`,
);
return SourcePostTransformationService.handleFailureEventsSource(error, metaTO);
diff --git a/src/services/userTransform.ts b/src/services/userTransform.ts
index 95c251e945d..d624d96af8d 100644
--- a/src/services/userTransform.ts
+++ b/src/services/userTransform.ts
@@ -220,7 +220,13 @@ export class UserTransformService {
} as UserTransformationServiceResponse;
}
- public static async testTransformRoutine(events, trRevCode, libraryVersionIDs, credentials) {
+ public static async testTransformRoutine(
+ events,
+ trRevCode,
+ libraryVersionIDs,
+ credentials,
+ returnMetadata = false,
+ ) {
const response: FixMe = {};
let errorCode: number | undefined;
try {
@@ -249,6 +255,7 @@ export class UserTransformService {
libraryVersionIDs,
trRevCode,
true,
+ returnMetadata,
);
response.status = 200;
} catch (error: CatchErr) {
diff --git a/src/sources/appsflyer/transform.js b/src/sources/appsflyer/transform.js
index 07e60f136e3..bf0a0004765 100644
--- a/src/sources/appsflyer/transform.js
+++ b/src/sources/appsflyer/transform.js
@@ -23,7 +23,7 @@ function createBaseMessage(eventName) {
function processEvent(event) {
if (!event.event_name) {
- throw new TransformationError('Unknwon event type from Appsflyer');
+ throw new TransformationError('Unknown event type from Appsflyer');
}
const message = createBaseMessage(event.event_name);
diff --git a/src/sources/braze/transform.js b/src/sources/braze/transform.js
index 5d19e26fb3c..21c8423cdec 100644
--- a/src/sources/braze/transform.js
+++ b/src/sources/braze/transform.js
@@ -89,7 +89,7 @@ const process = (payload) => {
// Figure out a way to handle partial failures within batch
// responses.push({
// statusCode: 400,
- // error: error.message || "Unknwon error"
+ // error: error.message || "Unknown error"
// });
}
});
diff --git a/src/sources/facebook_lead_ads_native/hydrate.test.ts b/src/sources/facebook_lead_ads_native/hydrate.test.ts
index e93fc32c0b2..a773f805ad2 100644
--- a/src/sources/facebook_lead_ads_native/hydrate.test.ts
+++ b/src/sources/facebook_lead_ads_native/hydrate.test.ts
@@ -343,8 +343,11 @@ describe('Facebook Lead Ads Hydration', () => {
const result = await hydrate(input);
expect(result.batch).toHaveLength(1);
- expect(result.batch[0].statusCode).toBe(401);
- expect(result.batch[0].errorMessage).toBe('Invalid OAuth access token');
+ // Facebook error handler maps code 190 to status 400 (auth errors)
+ expect(result.batch[0].statusCode).toBe(400);
+ expect(result.batch[0].errorMessage).toBe(
+ 'Invalid OAuth access token. Facebook responded with error code: 190',
+ );
});
it('should handle mixed success and failure responses', async () => {
@@ -381,8 +384,9 @@ describe('Facebook Lead Ads Hydration', () => {
expect(result.batch[0].statusCode).toBe(200);
expect(result.batch[0].event?.context?.traits?.full_name).toBe('John Doe');
expect(result.batch[0].errorMessage).toBeUndefined();
- expect(result.batch[1].statusCode).toBe(404);
- expect(result.batch[1].errorMessage).toBe('Lead not found');
+ // Unknown error without code mapping defaults to 500 and stringifies the error
+ expect(result.batch[1].statusCode).toBe(500);
+ expect(result.batch[1].errorMessage).toBe('{"message":"Lead not found"}');
});
describe('Input validation', () => {
diff --git a/src/sources/facebook_lead_ads_native/hydrate.ts b/src/sources/facebook_lead_ads_native/hydrate.ts
index 56d27bab18b..8607e72777b 100644
--- a/src/sources/facebook_lead_ads_native/hydrate.ts
+++ b/src/sources/facebook_lead_ads_native/hydrate.ts
@@ -1,5 +1,5 @@
import { z } from 'zod';
-import { formatZodError, InstrumentationError } from '@rudderstack/integrations-lib';
+import { BaseError, formatZodError, InstrumentationError } from '@rudderstack/integrations-lib';
import { httpGET } from '../../adapters/network';
import { processAxiosResponse } from '../../adapters/utils/networkUtils';
import {
@@ -11,6 +11,7 @@ import {
SourceHydrationRequest,
} from '../../types/sourceHydration';
import { HTTP_STATUS_CODES } from '../../v0/util/constant';
+import { errorResponseHandler } from '../../v0/util/facebookUtils/networkHandler';
// Complete schema
const FacebookLeadAdsHydrationInputSchema = SourceHydrationRequestSchema.extend({
@@ -86,10 +87,24 @@ async function fetchLeadData(
statusCode: HTTP_STATUS_CODES.OK,
};
}
- return {
- statusCode: processedResponse.status,
- error: processedResponse.response?.error?.message || 'Unknown error',
- };
+
+ // Use Facebook's error handler for proper error classification
+ try {
+ errorResponseHandler({
+ response: processedResponse.response,
+ status: processedResponse.status,
+ });
+ } catch (error: unknown) {
+ if (error instanceof BaseError) {
+ return {
+ statusCode: error.status,
+ error: error.message,
+ };
+ }
+    throw new Error(`Unexpected: unknown error type ${error}`, { cause: error });
+ }
+ // This should never be reached since errorResponseHandler always throws for errors
+ throw new Error('Unexpected: errorResponseHandler did not throw for non-OK response');
}
/**
diff --git a/src/sources/iterable/transform.js b/src/sources/iterable/transform.js
index ffb96c26b30..4bdaad5ab47 100644
--- a/src/sources/iterable/transform.js
+++ b/src/sources/iterable/transform.js
@@ -1,19 +1,35 @@
const path = require('path');
const fs = require('fs');
const md5 = require('md5');
-const { TransformationError } = require('@rudderstack/integrations-lib');
+const {
+ TransformationError,
+ isDefinedAndNotNullAndNotEmpty,
+} = require('@rudderstack/integrations-lib');
const Message = require('../message');
-const { getBodyFromV2SpecPayload } = require('../../v0/util');
+const { getBodyFromV2SpecPayload, isDefinedAndNotNull } = require('../../v0/util');
// import mapping json using JSON.parse to preserve object key order
const mapping = JSON.parse(fs.readFileSync(path.resolve(__dirname, './mapping.json'), 'utf-8'));
+const isNonEmptyString = (val) => typeof val === 'string' && isDefinedAndNotNullAndNotEmpty(val);
+
+/**
+ * Throws an error if required fields are not present.
+ * Ref: https://support.iterable.com/hc/en-us/articles/208013936-System-Webhooks#system-webhook-request-body
+ * @param {*} event
+ */
+function checkForRequiredFields(event) {
+ if (
+ (!isNonEmptyString(event.email) && !isNonEmptyString(event.userId)) ||
+ !isNonEmptyString(event.eventName)
+ ) {
+ throw new TransformationError('Unknown event type from Iterable');
+ }
+}
+
function process(payload) {
const event = getBodyFromV2SpecPayload(payload);
- // throw an error if (email, eventName) are not present
- if (!(event.email && event.eventName)) {
- throw new TransformationError('Unknwon event type from Iterable');
- }
+ checkForRequiredFields(event);
const message = new Message(`Iterable`);
// event type is always track
@@ -38,7 +54,7 @@ function process(payload) {
// Treating userId as unique identifier
// If userId is not present, then generating it from email using md5 hash function
- if (message.userId === null || message.userId === undefined) {
+ if (!isDefinedAndNotNull(message.userId)) {
message.userId = md5(event.email);
}
diff --git a/src/types/controlPlaneConfig.ts b/src/types/controlPlaneConfig.ts
index 771ebef3989..58d6589961a 100644
--- a/src/types/controlPlaneConfig.ts
+++ b/src/types/controlPlaneConfig.ts
@@ -6,7 +6,6 @@ export type DestinationDefinition = {
Name: string;
DisplayName: string;
Config: Record;
- ResponseRules?: Record | null;
};
export type AccountDefinitionConfig = { refreshOAuthToken?: string };
diff --git a/src/types/destinationTransformation.ts b/src/types/destinationTransformation.ts
index bcd0fec9aec..41d3209dad9 100644
--- a/src/types/destinationTransformation.ts
+++ b/src/types/destinationTransformation.ts
@@ -47,6 +47,7 @@ export type BatchedRequest<
type: string;
method: string;
endpoint: string;
+ endpointPath?: string;
headers: THeaders;
params: TParams;
files: Record;
@@ -65,6 +66,19 @@ export type BatchRequestOutput<
destination: TDestination;
};
+export type MultiBatchRequestOutput<
+ TPayload = Record,
+ THeaders = Record,
+ TParams = Record,
+ TDestination = Destination,
+> = {
+ batchedRequest: BatchedRequest[];
+ metadata: Partial[];
+ batched: boolean;
+ statusCode: number;
+ destination: TDestination;
+};
+
/**
* Output structure for processor transformations
*/
diff --git a/src/util/customTransformer.js b/src/util/customTransformer.js
index 8ecd7ac4616..835acef4d76 100644
--- a/src/util/customTransformer.js
+++ b/src/util/customTransformer.js
@@ -331,6 +331,7 @@ async function userTransformHandler(
libraryVersionIDs,
trRevCode = {},
testMode = false,
+ returnMetadata = false,
) {
if (versionId) {
const res = testMode ? trRevCode : await getTransformationCode(versionId);
@@ -357,15 +358,17 @@ async function userTransformHandler(
userTransformedEvents = result.transformedEvents;
if (testMode) {
- userTransformedEvents = {
- transformedEvents: result.transformedEvents.map((ev) => {
- if (ev.error) {
- return { error: ev.error };
- }
- return ev.transformedEvent;
- }),
- logs: result.logs,
- };
+ userTransformedEvents = returnMetadata
+ ? result
+ : {
+ transformedEvents: result.transformedEvents.map((ev) => {
+ if (ev.error) {
+ return { error: ev.error };
+ }
+ return ev.transformedEvent;
+ }),
+ logs: result.logs,
+ };
}
} else {
result = await runUserTransform(
diff --git a/src/util/openfaas/index.js b/src/util/openfaas/index.js
index 016a2f3f3b0..2465c47c6aa 100644
--- a/src/util/openfaas/index.js
+++ b/src/util/openfaas/index.js
@@ -340,7 +340,7 @@ function buildOpenfaasFn(name, code, versionId, libraryVersionIDs, testMode, trM
'com.openfaas.scale.type': FAAS_SCALE_TYPE,
transformationId: trMetadata.transformationId,
workspaceId: trMetadata.workspaceId,
- team: 'data-management',
+ team: 'pipelines',
service: 'openfaas-fn',
customer: 'shared',
'customer-tier': CUSTOMER_TIER,
diff --git a/src/util/prometheus.js b/src/util/prometheus.js
index a2d358b7c1c..2729c6d5a70 100644
--- a/src/util/prometheus.js
+++ b/src/util/prometheus.js
@@ -484,21 +484,31 @@ class Prometheus {
{
name: 'braze_batch_attributes_pack_size',
- help: 'braze_batch_attributes_pack_size',
- type: 'gauge',
+ help: 'Distribution of attributes count per batch chunk',
+ type: 'histogram',
labelNames: ['destination_id'],
+ buckets: [1, 5, 10, 20, 30, 40, 50, 60, 70, 75],
},
{
name: 'braze_batch_events_pack_size',
- help: 'braze_batch_events_pack_size',
- type: 'gauge',
+ help: 'Distribution of events count per batch chunk',
+ type: 'histogram',
labelNames: ['destination_id'],
+ buckets: [1, 5, 10, 20, 30, 40, 50, 60, 70, 75],
},
{
name: 'braze_batch_purchase_pack_size',
- help: 'braze_batch_purchase_pack_size',
- type: 'gauge',
+ help: 'Distribution of purchases count per batch chunk',
+ type: 'histogram',
labelNames: ['destination_id'],
+ buckets: [1, 5, 10, 20, 30, 40, 50, 60, 70, 75],
+ },
+ {
+ name: 'braze_batch_total_pack_size',
+ help: 'Distribution of total count per batch chunk',
+ type: 'histogram',
+ labelNames: ['destination_id'],
+ buckets: [1, 5, 10, 20, 30, 40, 50, 60, 70, 75, 100, 125, 150, 175, 200, 225],
},
{
name: 'braze_alias_failure_count',
@@ -1045,12 +1055,6 @@ class Prometheus {
labelNames: ['destination_id', 'source_id'],
buckets: [10, 50, 100, 200, 500, 800, 1000],
},
- {
- name: 'salesforce_soql_lookup_count',
- help: 'Count of SOQL-based lookups executed using Salesforce SDK',
- type: 'counter',
- labelNames: ['method', 'objectType', 'workspaceId'],
- },
];
for (const metric of metrics) {
diff --git a/src/util/utils.js b/src/util/utils.js
index 558862db38f..cd50cb1ea9c 100644
--- a/src/util/utils.js
+++ b/src/util/utils.js
@@ -3,6 +3,7 @@ const http = require('http');
const https = require('https');
const { Resolver } = require('dns').promises;
const fetch = require('node-fetch');
+const { AsyncLocalStorage } = require('node:async_hooks');
const util = require('util');
const NodeCache = require('node-cache');
@@ -10,6 +11,7 @@ const logger = require('../logger');
const stats = require('./stats');
const resolver = new Resolver();
+const dnsCallbackStorage = new AsyncLocalStorage();
const BLOCK_HOST_NAMES = process.env.BLOCK_HOST_NAMES || '';
const BLOCK_HOST_NAMES_LIST = BLOCK_HOST_NAMES.split(',');
@@ -45,16 +47,16 @@ const fetchAddressFromHostName = async (hostname) => {
};
const staticLookup =
- (transformationTags, fetchAddress = fetchAddressFromHostName) =>
+ (fetchAddress = fetchAddressFromHostName) =>
(hostname, options, cb) => {
const resolveStartTime = new Date();
+ const onDnsResolved = dnsCallbackStorage.getStore();
fetchAddress(hostname)
.then(({ address, cacheHit }) => {
- stats.timing('fetch_dns_resolve_time', resolveStartTime, {
- ...transformationTags,
- cacheHit,
- });
+ if (onDnsResolved) {
+ onDnsResolved({ resolveStartTime, cacheHit, error: false });
+ }
if (!address) {
cb(new Error(`resolved empty list of IP address for ${hostname}`), null);
@@ -68,20 +70,54 @@ const staticLookup =
})
.catch((error) => {
logger.error(`DNS Error Code: ${error.code} | Message : ${error.message}`);
- stats.timing('fetch_dns_resolve_time', resolveStartTime, {
- ...transformationTags,
- error: 'true',
- });
+ if (onDnsResolved) {
+ onDnsResolved({ resolveStartTime, cacheHit: false, error: true });
+ }
cb(new Error(`unable to resolve IP address for ${hostname}`), null);
});
};
-const httpAgentWithDnsLookup = (scheme, transformationTags) => {
- const httpModule = scheme === 'http' ? http : https;
- return new httpModule.Agent({ lookup: staticLookup(transformationTags) });
+const parseEnvInt = (value, defaultValue) => {
+ if (!value) return defaultValue;
+ const parsed = Number.parseInt(value, 10);
+ return Number.isNaN(parsed) ? defaultValue : parsed;
+};
+
+const SHARED_HTTP_AGENT_DISABLE_KEEP_ALIVE =
+ process.env.SHARED_HTTP_AGENT_DISABLE_KEEP_ALIVE === 'true';
+// Socket inactivity timeout. Only starts after a socket is acquired and connected.
+// Resets whenever data flows; does not protect against socket pool exhaustion.
+const SHARED_HTTP_AGENT_TIMEOUT_MS = parseEnvInt(process.env.SHARED_HTTP_AGENT_TIMEOUT_MS, 60000);
+const SHARED_HTTP_AGENT_MAX_SOCKETS = parseEnvInt(process.env.SHARED_HTTP_AGENT_MAX_SOCKETS, 200);
+const SHARED_HTTP_AGENT_MAX_FREE_SOCKETS = parseEnvInt(
+ process.env.SHARED_HTTP_AGENT_MAX_FREE_SOCKETS,
+ 10,
+);
+
+const sharedAgentOptions = {
+ keepAlive: !SHARED_HTTP_AGENT_DISABLE_KEEP_ALIVE,
+ timeout: SHARED_HTTP_AGENT_TIMEOUT_MS,
+ maxSockets: SHARED_HTTP_AGENT_MAX_SOCKETS,
+ maxFreeSockets: SHARED_HTTP_AGENT_MAX_FREE_SOCKETS,
};
+const sharedHttpAgent = new http.Agent(sharedAgentOptions);
+const sharedHttpsAgent = new https.Agent(sharedAgentOptions);
+
+const sharedHttpAgentWithLookup = new http.Agent({
+ ...sharedAgentOptions,
+ lookup: staticLookup(),
+});
+
+const sharedHttpsAgentWithLookup = new https.Agent({
+ ...sharedAgentOptions,
+ lookup: staticLookup(),
+});
+
const blockLocalhostRequests = (url) => {
+ if (process.env.ALLOW_LOCALHOST_FETCH === 'true') {
+ return;
+ }
try {
const parseUrl = new URL(url);
const { hostname } = parseUrl;
@@ -103,10 +139,6 @@ const blockInvalidProtocolRequests = (url) => {
};
const fetchWithDnsWrapper = async (transformationTags, ...args) => {
- if (process.env.DNS_RESOLVE_FETCH_HOST !== 'true') {
- return await fetch(...args);
- }
-
if (args.length === 0) {
throw new Error('fetch url is required');
}
@@ -115,9 +147,24 @@ const fetchWithDnsWrapper = async (transformationTags, ...args) => {
blockInvalidProtocolRequests(fetchURL);
const fetchOptions = args[1] || {};
const schemeName = fetchURL.startsWith('https') ? 'https' : 'http';
- // assign resolved agent to fetch
- fetchOptions.agent = httpAgentWithDnsLookup(schemeName, transformationTags);
- return await fetch(fetchURL, fetchOptions);
+
+ if (process.env.DNS_RESOLVE_FETCH_HOST !== 'true') {
+ fetchOptions.agent = schemeName === 'https' ? sharedHttpsAgent : sharedHttpAgent;
+ return await fetch(fetchURL, fetchOptions);
+ }
+
+ const onDnsResolved = ({ resolveStartTime, cacheHit, error }) => {
+ // Destructure to exclude isSuccess which is not part of fetch_dns_resolve_time labelset
+ const { isSuccess, ...dnsMetricTags } = transformationTags;
+ stats.timing('fetch_dns_resolve_time', resolveStartTime, {
+ ...dnsMetricTags,
+ ...(error ? { error: 'true' } : { cacheHit }),
+ });
+ };
+
+  fetchOptions.agent =
+    schemeName === 'https' ? sharedHttpsAgentWithLookup : sharedHttpAgentWithLookup;
+ return dnsCallbackStorage.run(onDnsResolved, () => fetch(fetchURL, fetchOptions));
};
class RespStatusError extends Error {
@@ -254,6 +301,7 @@ module.exports = {
extractStackTraceUptoLastSubstringMatch,
fetchWithDnsWrapper,
staticLookup,
+ dnsCallbackStorage,
shouldSkipDynamicConfigProcessing,
shouldGroupByDestinationConfig,
};
diff --git a/src/util/utils.test.js b/src/util/utils.test.js
index ead605aab89..32292ed1622 100644
--- a/src/util/utils.test.js
+++ b/src/util/utils.test.js
@@ -1,10 +1,42 @@
-const { staticLookup } = require('./utils');
+jest.mock('node-fetch');
+jest.mock('./stats');
+jest.mock('dns', () => ({
+ promises: {
+ Resolver: jest.fn().mockImplementation(() => ({
+ resolve4: jest.fn().mockResolvedValue([{ address: '93.184.216.34', ttl: 300 }]),
+ })),
+ },
+}));
+
+const fetch = require('node-fetch');
+const stats = require('./stats');
+const { staticLookup, dnsCallbackStorage, fetchWithDnsWrapper } = require('./utils');
+
+describe('asyncHooks behaviour', () => {
+ it('should propagate correctly', () => {
+ const { AsyncLocalStorage } = require('async_hooks');
+ const ctx = new AsyncLocalStorage();
+
+ let count = 0;
+ const someFunction = () => {
+ const data = ctx.getStore();
+ if (count === 0) {
+ expect(data).toBe('test1');
+ } else {
+ expect(data).toBe('test2');
+ }
+ count++;
+ };
+ ctx.run('test1', someFunction);
+ ctx.run('test2', someFunction);
+ });
+});
describe('staticLookup', () => {
- const transformationTags = { tag: 'value' };
const RECORD_TYPE_A = 4;
const HOST_NAME = 'example.com';
const fetchAddressFromHostName = jest.fn();
+ const onDnsResolved = jest.fn();
beforeEach(() => {
jest.clearAllMocks();
@@ -15,31 +47,36 @@ describe('staticLookup', () => {
name: 'should resolve the hostname and return the IP address',
mockResponse: { address: '192.168.1.1', cacheHit: true },
expectedArgs: [null, '192.168.1.1', RECORD_TYPE_A],
+ expectedDnsResolvedCall: { cacheHit: true, error: false },
},
{
name: 'should resolve the hostname and return the IP address with all option',
options: { all: true },
mockResponse: { address: '192.168.1.1', cacheHit: true },
expectedArgs: [null, [{ address: '192.168.1.1', family: RECORD_TYPE_A }]],
+ expectedDnsResolvedCall: { cacheHit: true, error: false },
},
{
name: 'should handle errors from fetchAddressFromHostName',
mockResponse: { error: 'DNS error', errorCode: 'ENOTFOUND' },
expectedArgs: [new Error(`unable to resolve IP address for ${HOST_NAME}`), null],
+ expectedDnsResolvedCall: { cacheHit: false, error: true },
},
{
name: 'should handle empty address',
mockResponse: { address: '', cacheHit: true },
expectedArgs: [new Error(`resolved empty list of IP address for ${HOST_NAME}`), null],
+ expectedDnsResolvedCall: { cacheHit: true, error: false },
},
{
name: 'should handle localhost address',
mockResponse: { address: '127.0.0.1', cacheHit: true },
expectedArgs: [new Error(`cannot use 127.0.0.1 as IP address`), null],
+ expectedDnsResolvedCall: { cacheHit: true, error: false },
},
];
- testCases.forEach(({ name, options, mockResponse, expectedArgs }) => {
+ testCases.forEach(({ name, options, mockResponse, expectedArgs, expectedDnsResolvedCall }) => {
it(name, async () => {
if (mockResponse.error) {
const error = new Error(mockResponse.error);
@@ -49,13 +86,104 @@ describe('staticLookup', () => {
fetchAddressFromHostName.mockResolvedValueOnce(mockResponse);
}
- const resolve = staticLookup(transformationTags, fetchAddressFromHostName);
+ const resolve = staticLookup(fetchAddressFromHostName);
const callback = (...args) => {
expect(fetchAddressFromHostName).toHaveBeenCalledWith(HOST_NAME);
expect(args).toEqual(expectedArgs);
+ expect(onDnsResolved).toHaveBeenCalledWith(
+ expect.objectContaining(expectedDnsResolvedCall),
+ );
};
- resolve(HOST_NAME, options, callback);
+      await new Promise((pass, fail) =>
+        dnsCallbackStorage.run(onDnsResolved, () => resolve(HOST_NAME, options, (...args) => { try { callback(...args); pass(); } catch (err) { fail(err); } })),
+      );
+ });
+ });
+});
+
+describe('fetchWithDnsWrapper', () => {
+ const originalEnv = process.env;
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ process.env = { ...originalEnv };
+ fetch.mockResolvedValue({ ok: true, status: 200 });
+ });
+
+ afterEach(() => {
+ process.env = originalEnv;
+ });
+
+ it('should set up onDnsResolved callback that calls stats.timing with transformation tags', async () => {
+ process.env.DNS_RESOLVE_FETCH_HOST = 'true';
+ const transformationTags = { workspaceId: 'ws123', transformationId: 'tr456' };
+
+ // Capture the callback stored in dnsCallbackStorage during the fetch
+ let capturedCallback;
+ const originalRun = dnsCallbackStorage.run.bind(dnsCallbackStorage);
+ jest.spyOn(dnsCallbackStorage, 'run').mockImplementation((callback, fn) => {
+ capturedCallback = callback;
+ return originalRun(callback, fn);
});
+
+ await fetchWithDnsWrapper(transformationTags, 'https://example.com/api');
+
+ // Simulate DNS resolution callback
+ capturedCallback({ resolveStartTime: new Date(), cacheHit: true, error: false });
+
+ expect(stats.timing).toHaveBeenCalledWith(
+ 'fetch_dns_resolve_time',
+ expect.any(Date),
+ expect.objectContaining({
+ workspaceId: 'ws123',
+ transformationId: 'tr456',
+ cacheHit: true,
+ }),
+ );
+ });
+
+ it('should use shared https agent for https URLs', async () => {
+ process.env.DNS_RESOLVE_FETCH_HOST = 'true';
+
+ await fetchWithDnsWrapper({}, 'https://example.com/api');
+
+ expect(fetch).toHaveBeenCalledWith(
+ 'https://example.com/api',
+ expect.objectContaining({
+ agent: expect.any(Object),
+ }),
+ );
+
+ const firstCallAgent = fetch.mock.calls[0][1].agent;
+
+ await fetchWithDnsWrapper({}, 'https://example.com/other');
+
+ const secondCallAgent = fetch.mock.calls[1][1].agent;
+ expect(secondCallAgent).toBe(firstCallAgent);
+ });
+
+ it('should use shared http agent for http URLs', async () => {
+ process.env.DNS_RESOLVE_FETCH_HOST = 'true';
+
+ await fetchWithDnsWrapper({}, 'http://example.com/api');
+ const firstCallAgent = fetch.mock.calls[0][1].agent;
+
+ await fetchWithDnsWrapper({}, 'http://example.com/other');
+ const secondCallAgent = fetch.mock.calls[1][1].agent;
+
+ expect(secondCallAgent).toBe(firstCallAgent);
+ });
+
+ it('should bypass DNS wrapper when DNS_RESOLVE_FETCH_HOST is not true', async () => {
+ process.env.DNS_RESOLVE_FETCH_HOST = 'false';
+
+ await fetchWithDnsWrapper({}, 'https://example.com/api');
+
+ expect(fetch).toHaveBeenCalledWith(
+ 'https://example.com/api',
+ expect.objectContaining({ agent: expect.any(Object) }),
+ );
+ expect(stats.timing).not.toHaveBeenCalled();
});
});
diff --git a/src/v0/destinations/attentive_tag/transform.js b/src/v0/destinations/attentive_tag/transform.js
index 34cdd568a5e..80f2d35e995 100644
--- a/src/v0/destinations/attentive_tag/transform.js
+++ b/src/v0/destinations/attentive_tag/transform.js
@@ -32,6 +32,7 @@ const responseBuilder = (payload, apiKey, endpoint) => {
if (payload) {
const response = defaultRequestConfig();
response.endpoint = `${BASE_URL}${endpoint}`;
+ response.endpointPath = endpoint;
response.headers = {
Authorization: `Bearer ${apiKey}`,
'Content-Type': JSON_MIME_TYPE,
diff --git a/src/v0/destinations/autopilot/config.js b/src/v0/destinations/autopilot/config.js
deleted file mode 100644
index 69a56f3ac2b..00000000000
--- a/src/v0/destinations/autopilot/config.js
+++ /dev/null
@@ -1,20 +0,0 @@
-const { getMappingConfig } = require('../../util');
-
-const baseEndpoint = 'https://api2.autopilothq.com/v1';
-const endpoints = {
- addContactUrl: `${baseEndpoint}/contact`, // add a contact, | Identify
- triggerJourneyUrl: `${baseEndpoint}/trigger`, // trigger a journey | Track
-};
-
-const CONFIG_CATEGORIES = {
- IDENTIFY: { endPoint: endpoints.addContactUrl, name: 'APIdentifyConfig' },
- TRACK: { endPoint: endpoints.triggerJourneyUrl, name: 'APTrackConfig' },
-};
-const MAPPING_CONFIG = getMappingConfig(CONFIG_CATEGORIES, __dirname);
-const DESTINATION = 'autopilot';
-
-module.exports = {
- MAPPING_CONFIG,
- CONFIG_CATEGORIES,
- DESTINATION,
-};
diff --git a/src/v0/destinations/autopilot/data/APIdentifyConfig.json b/src/v0/destinations/autopilot/data/APIdentifyConfig.json
deleted file mode 100644
index c7fad8b59fb..00000000000
--- a/src/v0/destinations/autopilot/data/APIdentifyConfig.json
+++ /dev/null
@@ -1,47 +0,0 @@
-[
- {
- "destKey": "Email",
- "sourceKeys": ["traits.email", "context.traits.email"],
- "required": false
- },
- {
- "destKey": "FirstName",
- "sourceKeys": [
- "traits.firstName",
- "context.traits.firstName",
- "traits.firstname",
- "context.traits.firstname"
- ],
- "required": false
- },
- {
- "destKey": "LastName",
- "sourceKeys": [
- "traits.lastName",
- "context.traits.lastName",
- "traits.lastname",
- "context.traits.lastname"
- ],
- "required": false
- },
- {
- "destKey": "Phone",
- "sourceKeys": ["traits.phone", "context.traits.phone"],
- "required": false
- },
- {
- "destKey": "Company",
- "sourceKeys": ["traits.company.name", "context.traits.company.name"],
- "required": false
- },
- {
- "destKey": "Status",
- "sourceKeys": ["traits.status", "context.traits.status"],
- "required": false
- },
- {
- "destKey": "LeadSource",
- "sourceKeys": ["traits.LeadSource", "context.traits.LeadSource"],
- "required": false
- }
-]
diff --git a/src/v0/destinations/autopilot/data/APTrackConfig.json b/src/v0/destinations/autopilot/data/APTrackConfig.json
deleted file mode 100644
index ced7dfc24a0..00000000000
--- a/src/v0/destinations/autopilot/data/APTrackConfig.json
+++ /dev/null
@@ -1,7 +0,0 @@
-[
- {
- "destKey": "property",
- "sourceKeys": "properties",
- "required": false
- }
-]
diff --git a/src/v0/destinations/autopilot/transform.js b/src/v0/destinations/autopilot/transform.js
deleted file mode 100644
index e4a2c408af5..00000000000
--- a/src/v0/destinations/autopilot/transform.js
+++ /dev/null
@@ -1,106 +0,0 @@
-const { InstrumentationError, TransformationError } = require('@rudderstack/integrations-lib');
-const { EventType } = require('../../../constants');
-const { CONFIG_CATEGORIES, MAPPING_CONFIG } = require('./config');
-const {
- constructPayload,
- defaultPostRequestConfig,
- defaultRequestConfig,
- getFieldValueFromMessage,
- removeUndefinedAndNullValues,
- simpleProcessRouterDest,
-} = require('../../util');
-
-const { JSON_MIME_TYPE } = require('../../util/constant');
-
-const identifyFields = [
- 'email',
- 'firstname',
- 'firstName',
- 'lastname',
- 'lastName',
- 'phone',
- 'company',
- 'status',
- 'LeadSource',
-];
-
-function responseBuilderSimple(message, category, destination) {
- const payload = constructPayload(message, MAPPING_CONFIG[category.name]);
- if (payload) {
- const response = defaultRequestConfig();
- response.headers = {
- autopilotapikey: destination.Config.apiKey,
- 'Content-Type': JSON_MIME_TYPE,
- Accept: JSON_MIME_TYPE,
- };
- let responseBody;
- let contactIdOrEmail;
- let customPayload;
- switch (message.type) {
- case EventType.IDENTIFY:
- // fix for cases where traits and context.traits is missing
- customPayload = message.traits || message.context.traits || {};
- identifyFields.forEach((value) => {
- delete customPayload[value];
- });
- if (Object.keys(customPayload).length > 0) {
- responseBody = {
- contact: { ...payload, custom: customPayload },
- };
- } else {
- responseBody = {
- contact: { ...payload },
- };
- }
- response.endpoint = category.endPoint;
- break;
- case EventType.TRACK:
- responseBody = { ...payload };
- contactIdOrEmail = getFieldValueFromMessage(message, 'email');
- if (contactIdOrEmail) {
- response.endpoint = `${category.endPoint}/${destination.Config.triggerId}/contact/${contactIdOrEmail}`;
- } else {
- throw new InstrumentationError('Email is required for track calls');
- }
- break;
- default:
- break;
- }
- response.method = defaultPostRequestConfig.requestMethod;
- response.userId = message.anonymousId;
- response.body.JSON = removeUndefinedAndNullValues(responseBody);
- return response;
- }
- // fail-safety for developer error
- throw new TransformationError('Payload could not be constructed');
-}
-
-const processEvent = (message, destination) => {
- if (!message.type) {
- throw new InstrumentationError('invalid message type for autopilot');
- }
- const messageType = message.type;
- let category;
- switch (messageType.toLowerCase()) {
- case EventType.IDENTIFY:
- category = CONFIG_CATEGORIES.IDENTIFY;
- break;
- case EventType.TRACK:
- category = CONFIG_CATEGORIES.TRACK;
- break;
- default:
- throw new InstrumentationError(`message type ${messageType} not supported for autopilot`);
- }
-
- // build the response
- return responseBuilderSimple(message, category, destination);
-};
-
-const process = (event) => processEvent(event.message, event.destination);
-
-const processRouterDest = async (inputs, reqMetadata) => {
- const respList = await simpleProcessRouterDest(inputs, process, reqMetadata);
- return respList;
-};
-
-module.exports = { process, processRouterDest };
diff --git a/src/v0/destinations/braze/README.md b/src/v0/destinations/braze/README.md
index 50b5852c5e2..7221902ab86 100644
--- a/src/v0/destinations/braze/README.md
+++ b/src/v0/destinations/braze/README.md
@@ -1,6 +1,6 @@
# Braze Destination
-Implementation in **Javascript**
+Implementation in **TypeScript**
## Configuration
@@ -73,17 +73,30 @@ The Braze API enforces rate limits to ensure system stability. Here are the rate
\*Note: For accounts created after August 22, 2024, the rate limit for `/users/export/ids` is 250 requests per minute.
+#### Monthly Active Users (MAU) plans – CY 24-25, Universal MAU, Web MAU, Mobile MAU
+
+For customers on **Monthly Active Users CY 24-25**, **Universal MAU**, **Web MAU**, or **Mobile MAU** pricing, different limits apply to `/users/track`:
+
+- **Enforcement**: Rate limits are enforced at the **company level**. Workspaces can set hourly limits, but burst limits are shared across all workspaces.
+- **Hourly limits**: Set according to expected data ingestion (e.g. MAU tier, industry, seasonality). Current values are in the Braze dashboard under **Settings** → **APIs and Identifiers** → **API Usage Dashboard**.
+- **Burst limit**: In addition to the hourly limit, Braze enforces a **burst limit** on the number of requests allowed every 3 seconds.
+- **Batch limits**: Each request may include up to **75 updates combined** across attribute, event, and purchase objects (same as the base limits above).
+
+Contact Braze Support or your customer success manager for your account’s hourly and burst limits. See [Braze docs: MAU rate limits](https://www.braze.com/docs/api/endpoints/user_data/post_user_track/#monthly-active-users-cy-24-25-universal-mau-web-mau-and-mobile-mau).
+
#### Monitoring Rate Limits
-Every API response from Braze includes the following headers:
+Every API response from Braze includes the following headers (for non-`429` responses):
- `X-RateLimit-Limit`: Maximum number of requests allowed in the current time window
-- `X-RateLimit-Remaining`: Number of requests remaining in the current time window
-- `X-RateLimit-Reset`: Time at which the current rate limit window resets (UTC epoch seconds)
+- `X-RateLimit-Remaining`: Approximate number of requests remaining in the current window
+- `X-RateLimit-Reset`: Number of seconds until the current window resets
+
+For MAU-plan accounts, these headers reflect the hourly rate limit window. When Braze returns **HTTP 429**, these headers are not included; the response includes `X-Ratelimit-Retry-After` (seconds until retry is allowed) instead.
#### Handling Rate Limit Errors
-If you exceed rate limits, Braze will return a `429 Too Many Requests` status code. The destination implements exponential backoff retry logic to handle these errors.
+If you exceed rate limits, Braze returns **429 Too Many Requests**. The destination uses exponential backoff retry logic to handle these errors.
[Docs Reference](https://braze.com/docs/api/api_limits/#rate-limits-by-request-type)
@@ -97,7 +110,7 @@ If you exceed rate limits, Braze will return a `429 Too Many Requests` status co
- **Batching**: Configurable via `BRAZE_BATCH_IDENTIFY_RESOLUTION` environment variable
- This functionality merges anonymous users (with anonymousId or alias object) with identified users (with userId/external_id -> brazeExternalId)
-```Javascript
+```typescript
// The condition that leads to intermediate identify call:
const brazeExternalID = getDestinationExternalID(message, 'brazeExternalId') || message.userId;
if ((message.anonymousId || isAliasPresent) && brazeExternalID) {
@@ -123,9 +136,12 @@ if ((message.anonymousId || isAliasPresent) && brazeExternalID) {
- Improves throughput for high-volume identity resolution scenarios
- Helps stay within Braze's rate limits (20,000 requests per minute)
-```Javascript
+```typescript
// Batching logic implementation:
-const identifyCallsArrayChunks = lodash.chunk(identifyCallsArray, IDENTIFY_BRAZE_MAX_REQ_COUNT);
+const identifyCallsArrayChunks: BrazeIdentifyCall[][] = lodash.chunk(
+ identifyCallsArray,
+ IDENTIFY_BRAZE_MAX_REQ_COUNT,
+);
const allRequests = identifyCallsArrayChunks.map(async (identifyCallsChunk) => {
const aliasesToIdentify = identifyCallsChunk.flatMap(
(identifyCall) => identifyCall.identifyPayload.aliases_to_identify,
@@ -145,9 +161,9 @@ const allRequests = identifyCallsArrayChunks.map(async (identifyCallsChunk) => {
- `alias_name`: The anonymousId value from the event
- This allows for tracking anonymous users before they are identified
-```Javascript
+```typescript
// Corresponding code
-function setAliasObject(payload, message) {
+function setAliasObject(payload: Record, message: RudderBrazeMessage) {
const integrationsObj = getIntegrationsObj(message, 'BRAZE');
if (
isDefinedAndNotNull(integrationsObj?.alias?.alias_name) &&
@@ -191,12 +207,12 @@ function setAliasObject(payload, message) {
### Proxy Delivery
- **Supported**: Yes
-- **Source Code Path**: `src/v0/destinations/braze/networkHandler.js`
+- **Source Code Path**: `src/v0/destinations/braze/networkHandler.ts`
### User Deletion
- **Supported**: Yes
-- **Source Code Path**: `src/v0/destinations/braze/deleteUsers.js`
+- **Source Code Path**: `src/v0/destinations/braze/deleteUsers.ts`
- Implements the Braze User Delete API to comply with privacy regulations
### Additional Functionalities
@@ -332,7 +348,7 @@ Despite this time-based ordering, the issue here is, the user attributes can end
- **Multiplexing**: NO
- **Conditions for Identity Resolution**:
- ```javascript
+ ```typescript
const integrationsObj = getIntegrationsObj(message, 'BRAZE');
const isAliasPresent = isDefinedAndNotNull(integrationsObj?.alias);
const brazeExternalID = getDestinationExternalID(message, 'brazeExternalId') || message.userId;
diff --git a/src/v0/destinations/braze/batchForTrackAPI.test.js b/src/v0/destinations/braze/batchForTrackAPI.test.js
deleted file mode 100644
index 24a78f47d27..00000000000
--- a/src/v0/destinations/braze/batchForTrackAPI.test.js
+++ /dev/null
@@ -1,277 +0,0 @@
-const { batchForTrackAPI } = require('./util');
-
-describe('batchForTrackAPI', () => {
- // Helper function to create test data
- const createTestAttribute = (externalId, name = 'test_attr') => ({
- external_id: externalId,
- [name]: 'test_value',
- });
-
- const createTestEvent = (externalId, name = 'test_event') => ({
- external_id: externalId,
- name,
- time: '2023-01-01T00:00:00Z',
- });
-
- const createTestPurchase = (externalId, productId = 'test_product') => ({
- external_id: externalId,
- product_id: productId,
- currency: 'USD',
- price: 10.99,
- time: '2023-01-01T00:00:00Z',
- });
-
- describe('Basic functionality', () => {
- it('should handle empty arrays', () => {
- const result = batchForTrackAPI([], [], []);
- expect(result).toEqual([]);
- });
-
- it('should handle single items in each array', () => {
- const attributes = [createTestAttribute('user1')];
- const events = [createTestEvent('user1')];
- const purchases = [createTestPurchase('user1')];
-
- const result = batchForTrackAPI(attributes, events, purchases);
-
- expect(result).toHaveLength(1);
- expect(result[0].attributes).toHaveLength(1);
- expect(result[0].events).toHaveLength(1);
- expect(result[0].purchases).toHaveLength(1);
- expect(result[0].externalIds.has('user1')).toBe(true);
- });
-
- it('should handle arrays with different lengths', () => {
- const attributes = [createTestAttribute('user1'), createTestAttribute('user2')];
- const events = [createTestEvent('user1')];
- const purchases = [];
-
- const result = batchForTrackAPI(attributes, events, purchases);
-
- expect(result).toHaveLength(1);
- expect(result[0].attributes).toHaveLength(2);
- expect(result[0].events).toHaveLength(1);
- expect(result[0].purchases).toHaveLength(0);
- });
-
- it('should handle null/undefined items in arrays', () => {
- const attributes = [createTestAttribute('user1'), null, undefined];
- const events = [null, createTestEvent('user2')];
- const purchases = [undefined];
-
- const result = batchForTrackAPI(attributes, events, purchases);
-
- expect(result).toHaveLength(1);
- expect(result[0].attributes).toHaveLength(1);
- expect(result[0].events).toHaveLength(1);
- expect(result[0].purchases).toHaveLength(0);
- });
- });
-
- describe('Batching by external ID count', () => {
- it('should create multiple chunks when external ID count exceeds 75', () => {
- const attributes = [];
- const events = [];
- const purchases = [];
-
- // Create 76 different external IDs (exceeds TRACK_BRAZE_MAX_EXTERNAL_ID_COUNT = 75)
- for (let i = 1; i <= 76; i++) {
- attributes.push(createTestAttribute(`user${i}`));
- }
-
- const result = batchForTrackAPI(attributes, events, purchases);
-
- expect(result).toHaveLength(2);
- expect(result[0].externalIds.size).toBe(75);
- expect(result[1].externalIds.size).toBe(1);
- expect(result[0].attributes).toHaveLength(75);
- expect(result[1].attributes).toHaveLength(1);
- });
-
- it('should group items by external ID correctly', () => {
- const attributes = [
- createTestAttribute('user1', 'attr1'),
- createTestAttribute('user1', 'attr2'),
- createTestAttribute('user2', 'attr1'),
- ];
- const events = [createTestEvent('user1', 'event1'), createTestEvent('user2', 'event1')];
- const purchases = [createTestPurchase('user1')];
-
- const result = batchForTrackAPI(attributes, events, purchases);
-
- expect(result).toHaveLength(1);
- expect(result[0].externalIds.size).toBe(2);
- expect(result[0].attributes).toHaveLength(3);
- expect(result[0].events).toHaveLength(2);
- expect(result[0].purchases).toHaveLength(1);
- });
- });
-
- describe('Batching by request count per type', () => {
- it('should create multiple chunks when attributes count exceeds 75', () => {
- const attributes = [];
- const events = [];
- const purchases = [];
-
- // Create 76 attributes for the same user (exceeds TRACK_BRAZE_MAX_REQ_COUNT = 75)
- for (let i = 1; i <= 76; i++) {
- attributes.push(createTestAttribute('user1', `attr${i}`));
- }
-
- const result = batchForTrackAPI(attributes, events, purchases);
-
- expect(result).toHaveLength(2);
- expect(result[0].attributes).toHaveLength(75);
- expect(result[1].attributes).toHaveLength(1);
- expect(result[0].externalIds.has('user1')).toBe(true);
- expect(result[1].externalIds.has('user1')).toBe(true);
- });
-
- it('should create multiple chunks when events count exceeds 75', () => {
- const attributes = [];
- const events = [];
- const purchases = [];
-
- // Create 76 events for the same user
- for (let i = 1; i <= 76; i++) {
- events.push(createTestEvent('user1', `event${i}`));
- }
-
- const result = batchForTrackAPI(attributes, events, purchases);
-
- expect(result).toHaveLength(2);
- expect(result[0].events).toHaveLength(75);
- expect(result[1].events).toHaveLength(1);
- });
-
- it('should create multiple chunks when purchases count exceeds 75', () => {
- const attributes = [];
- const events = [];
- const purchases = [];
-
- // Create 76 purchases for the same user
- for (let i = 1; i <= 76; i++) {
- purchases.push(createTestPurchase('user1', `product${i}`));
- }
-
- const result = batchForTrackAPI(attributes, events, purchases);
-
- expect(result).toHaveLength(2);
- expect(result[0].purchases).toHaveLength(75);
- expect(result[1].purchases).toHaveLength(1);
- });
- });
-
- describe('Complex scenarios', () => {
- it('should handle mixed batching scenarios', () => {
- const attributes = [];
- const events = [];
- const purchases = [];
-
- // Create scenario where request count limit is hit first
- // 38 users with 2 attributes each = 76 attributes (exceeds 75 limit)
- // This will create a split based on request count, not external ID count
- for (let i = 1; i <= 38; i++) {
- attributes.push(createTestAttribute(`user${i}`, 'attr1'));
- attributes.push(createTestAttribute(`user${i}`, 'attr2'));
- events.push(createTestEvent(`user${i}`));
- }
-
- const result = batchForTrackAPI(attributes, events, purchases);
-
- expect(result).toHaveLength(2);
- // First chunk should have 75 attributes (hitting the request count limit)
- expect(result[0].attributes).toHaveLength(75);
- // Second chunk should have 1 attribute
- expect(result[1].attributes).toHaveLength(1);
- // External IDs should be distributed across chunks
- expect(result[0].externalIds.size).toBe(38); // 37 users with 2 attrs + 1 user with 1 attr
- expect(result[1].externalIds.size).toBe(1); // 1 user with 1 attr
- });
-
- it('should sort items by external ID', () => {
- const attributes = [
- createTestAttribute('user3'),
- createTestAttribute('user1'),
- createTestAttribute('user2'),
- ];
- const events = [];
- const purchases = [];
-
- const result = batchForTrackAPI(attributes, events, purchases);
-
- expect(result).toHaveLength(1);
- // Items should be sorted by external ID
- expect(result[0].attributes[0].external_id).toBe('user1');
- expect(result[0].attributes[1].external_id).toBe('user2');
- expect(result[0].attributes[2].external_id).toBe('user3');
- });
-
- it('should handle edge case with exactly 75 external IDs', () => {
- const attributes = [];
- const events = [];
- const purchases = [];
-
- // Create exactly 75 external IDs
- for (let i = 1; i <= 75; i++) {
- attributes.push(createTestAttribute(`user${i}`));
- }
-
- const result = batchForTrackAPI(attributes, events, purchases);
-
- expect(result).toHaveLength(1);
- expect(result[0].externalIds.size).toBe(75);
- expect(result[0].attributes).toHaveLength(75);
- });
-
- it('should handle edge case with exactly 75 items per type', () => {
- const attributes = [];
- const events = [];
- const purchases = [];
-
- // Create exactly 75 items of each type for the same user
- for (let i = 1; i <= 75; i++) {
- attributes.push(createTestAttribute('user1', `attr${i}`));
- events.push(createTestEvent('user1', `event${i}`));
- purchases.push(createTestPurchase('user1', `product${i}`));
- }
-
- const result = batchForTrackAPI(attributes, events, purchases);
-
- expect(result).toHaveLength(1);
- expect(result[0].attributes).toHaveLength(75);
- expect(result[0].events).toHaveLength(75);
- expect(result[0].purchases).toHaveLength(75);
- expect(result[0].externalIds.size).toBe(1);
- });
- });
-
- describe('Data structure validation', () => {
- it('should return chunks with correct structure', () => {
- const attributes = [createTestAttribute('user1')];
- const events = [createTestEvent('user1')];
- const purchases = [createTestPurchase('user1')];
-
- const result = batchForTrackAPI(attributes, events, purchases);
-
- expect(result).toHaveLength(1);
- expect(result[0]).toHaveProperty('attributes');
- expect(result[0]).toHaveProperty('events');
- expect(result[0]).toHaveProperty('purchases');
- expect(result[0]).toHaveProperty('externalIds');
- expect(result[0].externalIds).toBeInstanceOf(Set);
- });
-
- it('should preserve original data structure in chunks', () => {
- const originalAttribute = createTestAttribute('user1', 'custom_attr');
- const originalEvent = createTestEvent('user1', 'custom_event');
- const originalPurchase = createTestPurchase('user1', 'custom_product');
-
- const result = batchForTrackAPI([originalAttribute], [originalEvent], [originalPurchase]);
-
- expect(result[0].attributes[0]).toEqual(originalAttribute);
- expect(result[0].events[0]).toEqual(originalEvent);
- expect(result[0].purchases[0]).toEqual(originalPurchase);
- });
- });
-});
diff --git a/src/v0/destinations/braze/batchForTrackAPI.test.ts b/src/v0/destinations/braze/batchForTrackAPI.test.ts
new file mode 100644
index 00000000000..f431a294f9c
--- /dev/null
+++ b/src/v0/destinations/braze/batchForTrackAPI.test.ts
@@ -0,0 +1,492 @@
+import { BrazeEvent, BrazePurchase, BrazeUserAttributes } from './types';
+import { batchForTrackAPI, batchForTrackAPIV2 } from './util';
+
+// Helper function to create test data
+const createTestAttribute = (externalId, name = 'test_attr') => ({
+ external_id: externalId,
+ [name]: 'test_value',
+});
+
+const createTestEvent = (externalId, name = 'test_event') => ({
+ external_id: externalId,
+ name,
+ time: '2023-01-01T00:00:00Z',
+});
+
+const createTestPurchase = (externalId, productId = 'test_product') => ({
+ external_id: externalId,
+ product_id: productId,
+ currency: 'USD',
+ price: 10.99,
+ time: '2023-01-01T00:00:00Z',
+});
+
+describe('batchForTrackAPI', () => {
+ describe('Basic functionality', () => {
+ it('should handle empty arrays', () => {
+ const result = batchForTrackAPI([], [], []);
+ expect(result).toEqual([]);
+ });
+
+ it('should handle single items in each array', () => {
+ const attributes = [createTestAttribute('user1')];
+ const events = [createTestEvent('user1')];
+ const purchases = [createTestPurchase('user1')];
+
+ const result = batchForTrackAPI(attributes, events, purchases);
+
+ expect(result).toHaveLength(1);
+ expect(result[0].attributes).toHaveLength(1);
+ expect(result[0].events).toHaveLength(1);
+ expect(result[0].purchases).toHaveLength(1);
+ expect(result[0].externalIds.has('user1')).toBe(true);
+ });
+
+ it('should handle arrays with different lengths', () => {
+ const attributes = [createTestAttribute('user1'), createTestAttribute('user2')];
+ const events = [createTestEvent('user1')];
+ const purchases: BrazePurchase[] = [];
+
+ const result = batchForTrackAPI(attributes, events, purchases);
+
+ expect(result).toHaveLength(1);
+ expect(result[0].attributes).toHaveLength(2);
+ expect(result[0].events).toHaveLength(1);
+ expect(result[0].purchases).toHaveLength(0);
+ });
+
+ it('should handle null/undefined items in arrays', () => {
+ const attributes = [
+ createTestAttribute('user1'),
+ null,
+ undefined,
+ ] as unknown as BrazeUserAttributes[];
+ const events = [null, createTestEvent('user2')] as unknown as BrazeEvent[];
+ const purchases = [undefined] as unknown as BrazePurchase[];
+
+ const result = batchForTrackAPI(attributes, events, purchases);
+
+ expect(result).toHaveLength(1);
+ expect(result[0].attributes).toHaveLength(1);
+ expect(result[0].events).toHaveLength(1);
+ expect(result[0].purchases).toHaveLength(0);
+ });
+ });
+
+ describe('Batching by external ID count', () => {
+ it('should create multiple chunks when external ID count exceeds 75', () => {
+ const attributes: BrazeUserAttributes[] = [];
+ const events: BrazeEvent[] = [];
+ const purchases: BrazePurchase[] = [];
+
+ // Create 76 different external IDs (exceeds TRACK_BRAZE_MAX_EXTERNAL_ID_COUNT = 75)
+ for (let i = 1; i <= 76; i++) {
+ attributes.push(createTestAttribute(`user${i}`));
+ }
+
+ const result = batchForTrackAPI(attributes, events, purchases);
+
+ expect(result).toHaveLength(2);
+ expect(result[0].externalIds.size).toBe(75);
+ expect(result[1].externalIds.size).toBe(1);
+ expect(result[0].attributes).toHaveLength(75);
+ expect(result[1].attributes).toHaveLength(1);
+ });
+
+ it('should group items by external ID correctly', () => {
+ const attributes = [
+ createTestAttribute('user1', 'attr1'),
+ createTestAttribute('user1', 'attr2'),
+ createTestAttribute('user2', 'attr1'),
+ ];
+ const events = [createTestEvent('user1', 'event1'), createTestEvent('user2', 'event1')];
+ const purchases = [createTestPurchase('user1')];
+
+ const result = batchForTrackAPI(attributes, events, purchases);
+
+ expect(result).toHaveLength(1);
+ expect(result[0].externalIds.size).toBe(2);
+ expect(result[0].attributes).toHaveLength(3);
+ expect(result[0].events).toHaveLength(2);
+ expect(result[0].purchases).toHaveLength(1);
+ });
+ });
+
+ describe('Batching by request count per type', () => {
+ it('should create multiple chunks when attributes count exceeds 75', () => {
+ const attributes: BrazeUserAttributes[] = [];
+ const events: BrazeEvent[] = [];
+ const purchases: BrazePurchase[] = [];
+
+ // Create 76 attributes for the same user (exceeds TRACK_BRAZE_MAX_REQ_COUNT = 75)
+ for (let i = 1; i <= 76; i++) {
+ attributes.push(createTestAttribute('user1', `attr${i}`));
+ }
+
+ const result = batchForTrackAPI(attributes, events, purchases);
+
+ expect(result).toHaveLength(2);
+ expect(result[0].attributes).toHaveLength(75);
+ expect(result[1].attributes).toHaveLength(1);
+ expect(result[0].externalIds.has('user1')).toBe(true);
+ expect(result[1].externalIds.has('user1')).toBe(true);
+ });
+
+ it('should create multiple chunks when events count exceeds 75', () => {
+ const attributes: BrazeUserAttributes[] = [];
+ const events: BrazeEvent[] = [];
+ const purchases: BrazePurchase[] = [];
+
+ // Create 76 events for the same user
+ for (let i = 1; i <= 76; i++) {
+ events.push(createTestEvent('user1', `event${i}`));
+ }
+
+ const result = batchForTrackAPI(attributes, events, purchases);
+
+ expect(result).toHaveLength(2);
+ expect(result[0].events).toHaveLength(75);
+ expect(result[1].events).toHaveLength(1);
+ });
+
+ it('should create multiple chunks when purchases count exceeds 75', () => {
+ const attributes: BrazeUserAttributes[] = [];
+ const events: BrazeEvent[] = [];
+ const purchases: BrazePurchase[] = [];
+
+ // Create 76 purchases for the same user
+ for (let i = 1; i <= 76; i++) {
+ purchases.push(createTestPurchase('user1', `product${i}`));
+ }
+
+ const result = batchForTrackAPI(attributes, events, purchases);
+
+ expect(result).toHaveLength(2);
+ expect(result[0].purchases).toHaveLength(75);
+ expect(result[1].purchases).toHaveLength(1);
+ });
+ });
+
+ describe('Complex scenarios', () => {
+ it('should handle mixed batching scenarios', () => {
+ const attributes: BrazeUserAttributes[] = [];
+ const events: BrazeEvent[] = [];
+ const purchases: BrazePurchase[] = [];
+
+ // Create scenario where request count limit is hit first
+ // 38 users with 2 attributes each = 76 attributes (exceeds 75 limit)
+ // This will create a split based on request count, not external ID count
+ for (let i = 1; i <= 38; i++) {
+ attributes.push(createTestAttribute(`user${i}`, 'attr1'));
+ attributes.push(createTestAttribute(`user${i}`, 'attr2'));
+ events.push(createTestEvent(`user${i}`));
+ }
+
+ const result = batchForTrackAPI(attributes, events, purchases);
+
+ expect(result).toHaveLength(2);
+ // First chunk should have 75 attributes (hitting the request count limit)
+ expect(result[0].attributes).toHaveLength(75);
+ // Second chunk should have 1 attribute
+ expect(result[1].attributes).toHaveLength(1);
+ // External IDs should be distributed across chunks
+ expect(result[0].externalIds.size).toBe(38); // 37 users with 2 attrs + 1 user with 1 attr
+ expect(result[1].externalIds.size).toBe(1); // 1 user with 1 attr
+ });
+
+ it('should sort items by external ID', () => {
+ const attributes = [
+ createTestAttribute('user3'),
+ createTestAttribute('user1'),
+ createTestAttribute('user2'),
+ ];
+ const events: BrazeEvent[] = [];
+ const purchases: BrazePurchase[] = [];
+
+ const result = batchForTrackAPI(attributes, events, purchases);
+
+ expect(result).toHaveLength(1);
+ // Items should be sorted by external ID
+ expect((result[0].attributes[0] as { external_id: string }).external_id).toBe('user1');
+ expect((result[0].attributes[1] as { external_id: string }).external_id).toBe('user2');
+ expect((result[0].attributes[2] as { external_id: string }).external_id).toBe('user3');
+ });
+
+ it('should handle edge case with exactly 75 external IDs', () => {
+ const attributes: BrazeUserAttributes[] = [];
+ const events: BrazeEvent[] = [];
+ const purchases: BrazePurchase[] = [];
+
+ // Create exactly 75 external IDs
+ for (let i = 1; i <= 75; i++) {
+ attributes.push(createTestAttribute(`user${i}`));
+ }
+
+ const result = batchForTrackAPI(attributes, events, purchases);
+
+ expect(result).toHaveLength(1);
+ expect(result[0].externalIds.size).toBe(75);
+ expect(result[0].attributes).toHaveLength(75);
+ });
+
+ it('should handle edge case with exactly 75 items per type', () => {
+ const attributes: BrazeUserAttributes[] = [];
+ const events: BrazeEvent[] = [];
+ const purchases: BrazePurchase[] = [];
+
+ // Create exactly 75 items of each type for the same user
+ for (let i = 1; i <= 75; i++) {
+ attributes.push(createTestAttribute('user1', `attr${i}`));
+ events.push(createTestEvent('user1', `event${i}`));
+ purchases.push(createTestPurchase('user1', `product${i}`));
+ }
+
+ const result = batchForTrackAPI(attributes, events, purchases);
+
+ expect(result).toHaveLength(1);
+ expect(result[0].attributes).toHaveLength(75);
+ expect(result[0].events).toHaveLength(75);
+ expect(result[0].purchases).toHaveLength(75);
+ expect(result[0].externalIds.size).toBe(1);
+ });
+ });
+
+ describe('Data structure validation', () => {
+ it('should return chunks with correct structure', () => {
+ const attributes = [createTestAttribute('user1')];
+ const events = [createTestEvent('user1')];
+ const purchases = [createTestPurchase('user1')];
+
+ const result = batchForTrackAPI(attributes, events, purchases);
+
+ expect(result).toHaveLength(1);
+ expect(result[0]).toHaveProperty('attributes');
+ expect(result[0]).toHaveProperty('events');
+ expect(result[0]).toHaveProperty('purchases');
+ expect(result[0]).toHaveProperty('externalIds');
+ expect(result[0].externalIds).toBeInstanceOf(Set);
+ });
+
+ it('should preserve original data structure in chunks', () => {
+ const originalAttribute = createTestAttribute('user1', 'custom_attr');
+ const originalEvent = createTestEvent('user1', 'custom_event');
+ const originalPurchase = createTestPurchase('user1', 'custom_product');
+
+ const result = batchForTrackAPI([originalAttribute], [originalEvent], [originalPurchase]);
+
+ expect(result[0].attributes[0]).toEqual(originalAttribute);
+ expect(result[0].events[0]).toEqual(originalEvent);
+ expect(result[0].purchases[0]).toEqual(originalPurchase);
+ });
+ });
+});
+
+describe('batchForTrackAPIV2', () => {
+ describe('Basic functionality', () => {
+ it('should handle empty arrays', () => {
+ const result = batchForTrackAPIV2([], [], []);
+ expect(result).toEqual([]);
+ });
+
+ it('should handle single items in each array', () => {
+ const attributes = [createTestAttribute('user1')];
+ const events = [createTestEvent('user1')];
+ const purchases = [createTestPurchase('user1')];
+
+ const result = batchForTrackAPIV2(attributes, events, purchases);
+
+ expect(result).toHaveLength(1);
+ expect(result[0].attributes).toHaveLength(1);
+ expect(result[0].events).toHaveLength(1);
+ expect(result[0].purchases).toHaveLength(1);
+ });
+
+ it('should handle arrays with different lengths', () => {
+ const attributes = [createTestAttribute('user1'), createTestAttribute('user2')];
+ const events = [createTestEvent('user1')];
+ const purchases = [];
+
+ const result = batchForTrackAPIV2(attributes, events, purchases);
+
+ expect(result).toHaveLength(1);
+ expect(result[0].attributes).toHaveLength(2);
+ expect(result[0].events).toHaveLength(1);
+ expect(result[0].purchases).toHaveLength(0);
+ });
+
+ it('should handle null/undefined items in arrays by filtering them out', () => {
+ const attributes: BrazeUserAttributes[] = [
+ createTestAttribute('user1'),
+ null,
+ undefined,
+ ] as unknown as BrazeUserAttributes[];
+ const events = [null, createTestEvent('user2')] as unknown as BrazeEvent[];
+ const purchases: BrazePurchase[] = [undefined] as unknown as BrazePurchase[];
+
+ const result = batchForTrackAPIV2(attributes, events, purchases);
+
+ expect(result).toHaveLength(1);
+ expect(result[0].attributes).toHaveLength(1);
+ expect(result[0].events).toHaveLength(1);
+ expect(result[0].purchases).toHaveLength(0);
+ });
+ });
+
+ describe('Batching by total request count', () => {
+ it('should create chunks based on total combined count, not per-type count', () => {
+ const attributes: BrazeUserAttributes[] = [];
+ const events: BrazeEvent[] = [];
+ const purchases: BrazePurchase[] = [];
+
+ // Create 30 attributes, 30 events, 30 purchases for the same user
+ // Total = 90 items (exceeds TRACK_BRAZE_MAX_REQ_COUNT = 75)
+ for (let i = 1; i <= 30; i++) {
+ attributes.push(createTestAttribute('user1', `attr${i}`));
+ events.push(createTestEvent('user1', `event${i}`));
+ purchases.push(createTestPurchase('user1', `product${i}`));
+ }
+
+ const result = batchForTrackAPIV2(attributes, events, purchases);
+
+ // Should split into 2 chunks
+ expect(result).toHaveLength(2);
+
+ // First chunk should have 75 items total
+ const chunk1Size =
+ result[0].attributes.length + result[0].events.length + result[0].purchases.length;
+ expect(chunk1Size).toBe(75);
+
+ // Second chunk should have 15 items total (90 - 75)
+ const chunk2Size =
+ result[1].attributes.length + result[1].events.length + result[1].purchases.length;
+ expect(chunk2Size).toBe(15);
+ });
+
+ it('should split when total count exceeds 75 even with mixed types', () => {
+ const attributes: BrazeUserAttributes[] = [];
+ const events: BrazeEvent[] = [];
+ const purchases: BrazePurchase[] = [];
+
+ // 25 attributes + 25 events + 26 purchases = 76 items (exceeds 75)
+ for (let i = 1; i <= 25; i++) {
+ attributes.push(createTestAttribute('user1', `attr${i}`));
+ events.push(createTestEvent('user1', `event${i}`));
+ }
+ for (let i = 1; i <= 26; i++) {
+ purchases.push(createTestPurchase('user1', `product${i}`));
+ }
+
+ const result = batchForTrackAPIV2(attributes, events, purchases);
+
+ expect(result).toHaveLength(2);
+
+ const chunk1Size =
+ result[0].attributes.length + result[0].events.length + result[0].purchases.length;
+ expect(chunk1Size).toBe(75);
+
+ const chunk2Size =
+ result[1].attributes.length + result[1].events.length + result[1].purchases.length;
+ expect(chunk2Size).toBe(1);
+ });
+
+ it('should handle edge case with exactly 75 total items', () => {
+ const attributes: BrazeUserAttributes[] = [];
+ const events: BrazeEvent[] = [];
+ const purchases: BrazePurchase[] = [];
+
+ // Create exactly 75 items total (25 of each type)
+ for (let i = 1; i <= 25; i++) {
+ attributes.push(createTestAttribute('user1', `attr${i}`));
+ events.push(createTestEvent('user1', `event${i}`));
+ purchases.push(createTestPurchase('user1', `product${i}`));
+ }
+
+ const result = batchForTrackAPIV2(attributes, events, purchases);
+
+ expect(result).toHaveLength(1);
+ expect(result[0].attributes).toHaveLength(25);
+ expect(result[0].events).toHaveLength(25);
+ expect(result[0].purchases).toHaveLength(25);
+ });
+
+ it('should create multiple chunks when only attributes exceed 75', () => {
+ const attributes: BrazeUserAttributes[] = [];
+ const events: BrazeEvent[] = [];
+ const purchases: BrazePurchase[] = [];
+
+ // Create 76 attributes for the same user
+ for (let i = 1; i <= 76; i++) {
+ attributes.push(createTestAttribute('user1', `attr${i}`));
+ }
+
+ const result = batchForTrackAPIV2(attributes, events, purchases);
+
+ expect(result).toHaveLength(2);
+ expect(result[0].attributes).toHaveLength(75);
+ expect(result[1].attributes).toHaveLength(1);
+ });
+ });
+
+ describe('Complex scenarios', () => {
+ it('should handle mixed batching with multiple users and total size limit', () => {
+ const attributes: BrazeUserAttributes[] = [];
+ const events: BrazeEvent[] = [];
+ const purchases: BrazePurchase[] = [];
+
+ // Create 38 users with 1 attribute and 1 event each = 76 total items
+ for (let i = 1; i <= 38; i++) {
+ attributes.push(createTestAttribute(`user${i}`));
+ events.push(createTestEvent(`user${i}`));
+ }
+
+ const result = batchForTrackAPIV2(attributes, events, purchases);
+
+ expect(result).toHaveLength(2);
+
+ // First chunk should have 75 items total
+ const chunk1Size =
+ result[0].attributes.length + result[0].events.length + result[0].purchases.length;
+ expect(chunk1Size).toBe(75);
+
+ // Second chunk should have 1 item
+ const chunk2Size =
+ result[1].attributes.length + result[1].events.length + result[1].purchases.length;
+ expect(chunk2Size).toBe(1);
+ });
+
+ it('should sort items by external ID before batching', () => {
+ const attributes = [
+ createTestAttribute('user3', 'attr1'),
+ createTestAttribute('user1', 'attr1'),
+ createTestAttribute('user2', 'attr1'),
+ ];
+ const events = [createTestEvent('user3', 'event1'), createTestEvent('user1', 'event1')];
+ const purchases = [];
+
+ const result = batchForTrackAPIV2(attributes, events, purchases);
+
+ expect(result).toHaveLength(1);
+ // Items should be sorted by external ID, so user1 items come first
+ expect(result[0].attributes[0].external_id).toBe('user1');
+ expect(result[0].events[0].external_id).toBe('user1');
+ });
+
+ it('should handle large datasets requiring multiple chunks', () => {
+ const attributes: BrazeUserAttributes[] = [];
+ const events: BrazeEvent[] = [];
+ const purchases: BrazePurchase[] = [];
+
+ // Create 200 total items (should create 3 chunks: 75 + 75 + 50)
+ for (let i = 1; i <= 200; i++) {
+ attributes.push(createTestAttribute('user1', `attr${i}`));
+ }
+
+ const result = batchForTrackAPIV2(attributes, events, purchases);
+
+ expect(result).toHaveLength(3);
+ expect(result[0].attributes).toHaveLength(75);
+ expect(result[1].attributes).toHaveLength(75);
+ expect(result[2].attributes).toHaveLength(50);
+ });
+ });
+});
diff --git a/src/v0/destinations/braze/braze.util.test.js b/src/v0/destinations/braze/braze.util.test.ts
similarity index 58%
rename from src/v0/destinations/braze/braze.util.test.js
rename to src/v0/destinations/braze/braze.util.test.ts
index bcacb80be13..2eefa92ae20 100644
--- a/src/v0/destinations/braze/braze.util.test.js
+++ b/src/v0/destinations/braze/braze.util.test.ts
@@ -1,6 +1,6 @@
-const _ = require('lodash');
-const { handleHttpRequest } = require('../../../adapters/network');
-const {
+import _ from 'lodash';
+import { handleHttpRequest } from '../../../adapters/network';
+import {
BrazeDedupUtility,
addAppId,
formatGender,
@@ -9,17 +9,28 @@ const {
handleReservedProperties,
combineSubscriptionGroups,
getEndpointFromConfig,
-} = require('./util');
-const { processBatch } = require('./util');
-const {
- removeUndefinedAndNullValues,
- removeUndefinedAndNullAndEmptyValues,
-} = require('../../util');
-const { generateRandomString } = require('@rudderstack/integrations-lib');
+ processBatch,
+} from './util';
+import { removeUndefinedAndNullValues, removeUndefinedAndNullAndEmptyValues } from '../../util';
+import { generateRandomString } from '@rudderstack/integrations-lib';
+import {
+ BrazeDestination,
+ BrazeRouterRequest,
+ BrazeTransformedEvent,
+ BrazeTrackRequestBody,
+ BrazeSubscriptionBatchPayload,
+ BrazeMergeBatchPayload,
+ BrazeSubscriptionGroup,
+ BrazeUserAttributes,
+ BrazeDestinationConfig,
+ RudderBrazeMessage,
+} from './types';
// Mock the handleHttpRequest function
jest.mock('../../../adapters/network');
+const mockedHandleHttpRequest = jest.mocked(handleHttpRequest);
+
describe('dedup utility tests', () => {
describe('prepareInputForDedup', () => {
it('should return an object with empty arrays if no inputs are provided', () => {
@@ -33,7 +44,7 @@ describe('dedup utility tests', () => {
});
it('should extract the userIdIdOnly and add it to externalIdsToQuery array', () => {
- const input = [{ message: { userId: '762123' } }];
+ const input = [{ message: { userId: '762123' } }] as BrazeRouterRequest[];
const expectedOutput = {
externalIdsToQuery: ['762123'],
aliasIdsToQuery: [],
@@ -44,7 +55,9 @@ describe('dedup utility tests', () => {
it('should extract the externalIdOnly and add it to externalIdsToQuery array', () => {
const input = [
- { message: { context: { externalId: [{ type: 'brazeExternalId', id: '54321' }] } } },
+ {
+ message: { context: { externalId: [{ type: 'brazeExternalId', id: '54321' }] } },
+ } as unknown as BrazeRouterRequest,
];
const expectedOutput = {
externalIdsToQuery: ['54321'],
@@ -55,7 +68,7 @@ describe('dedup utility tests', () => {
});
it('should extract the anonymousId and add it to aliasIdsToQuery array', () => {
- const input = [{ message: { anonymousId: 'anon123' } }];
+ const input = [{ message: { anonymousId: 'anon123' } }] as BrazeRouterRequest[];
const expectedOutput = {
externalIdsToQuery: [],
aliasIdsToQuery: ['anon123'],
@@ -68,7 +81,7 @@ describe('dedup utility tests', () => {
const input = [
{ message: { userIdOnly: '123' } },
{ message: { context: { externalId: [{ type: 'brazeExternalId', id: '123' }] } } },
- ];
+ ] as unknown as BrazeRouterRequest[];
const expectedOutput = {
externalIdsToQuery: ['123'],
aliasIdsToQuery: [],
@@ -82,7 +95,7 @@ describe('dedup utility tests', () => {
{ message: { anonymousId: 'anon123' } },
{ message: { anonymousId: 'anon123' } },
{ message: { anonymousId: 'anon456' } },
- ];
+ ] as BrazeRouterRequest[];
const expectedOutput = {
externalIdsToQuery: [],
aliasIdsToQuery: ['anon123', 'anon456'],
@@ -198,12 +211,13 @@ describe('dedup utility tests', () => {
describe('doApiLookup', () => {
beforeEach(() => {
// Clear all instances and calls to handleHttpRequest mock function
- handleHttpRequest.mockClear();
+ mockedHandleHttpRequest.mockClear();
});
it('should return an array of users', async () => {
// Mock the response from handleHttpRequest
- handleHttpRequest.mockResolvedValueOnce({
+ mockedHandleHttpRequest.mockResolvedValueOnce({
+ httpResponse: Promise.resolve({}),
processedResponse: {
status: 200,
response: {
@@ -268,10 +282,13 @@ describe('dedup utility tests', () => {
WorkspaceID: 'workspaceidvalue',
Transformations: [],
IsProcessorEnabled: true,
- };
+ } as unknown as BrazeDestination;
// Call the function
- const users = await BrazeDedupUtility.doApiLookup(identfierChunks, { destination });
+ const users = await BrazeDedupUtility.doApiLookup(identfierChunks, {
+ destination,
+ metadata: {},
+ });
// Check the result - now returns object with users and failedIdentifiers
expect(users).toEqual([
@@ -335,8 +352,8 @@ describe('dedup utility tests', () => {
{
destType: 'braze',
feature: 'transformation',
+ metadata: {},
endpointPath: '/users/export/ids',
- feature: 'transformation',
module: 'router',
requestMethod: 'POST',
},
@@ -351,7 +368,7 @@ describe('dedup utility tests', () => {
restApiKey: generateRandomString(),
dataCenter: 'EU-01',
},
- };
+ } as unknown as BrazeDestination;
// Code randomly generate true or false alsoa with timestamp component
const randomBoolean = () => Math.random() >= 0.5;
@@ -367,71 +384,81 @@ describe('dedup utility tests', () => {
const identifierChunks = _.chunk(identifiers, 50);
// Mock the handleHttpRequest function to return the same data every time it's called
- handleHttpRequest.mockImplementationOnce(() => ({
- processedResponse: {
- status: 200,
- response: {
- users: Array.from({ length: 50 }, (_, i) =>
- removeUndefinedAndNullAndEmptyValues({
- external_id: identifiers[i].external_id,
- user_aliases: [
- removeUndefinedAndNullValues({
- alias_name: identifiers[i].alias_name,
- alias_label: identifiers[i].alias_label,
- }),
- ],
- first_name: `Test-${i}`,
- last_name: 'User',
- }),
- ),
+ mockedHandleHttpRequest.mockImplementationOnce(() =>
+ Promise.resolve({
+ httpResponse: Promise.resolve({}),
+ processedResponse: {
+ status: 200,
+ response: {
+ users: Array.from({ length: 50 }, (_, i) =>
+ removeUndefinedAndNullAndEmptyValues({
+ external_id: identifiers[i].external_id,
+ user_aliases: [
+ removeUndefinedAndNullValues({
+ alias_name: identifiers[i].alias_name,
+ alias_label: identifiers[i].alias_label,
+ }),
+ ],
+ first_name: `Test-${i}`,
+ last_name: 'User',
+ }),
+ ),
+ },
},
- },
- }));
+ }),
+ );
- handleHttpRequest.mockImplementationOnce(() => ({
- processedResponse: {
- status: 200,
- response: {
- users: Array.from({ length: 50 }, (_, i) =>
- removeUndefinedAndNullAndEmptyValues({
- external_id: identifiers[i + 50].external_id,
- user_aliases: [
- removeUndefinedAndNullValues({
- alias_name: identifiers[i + 50].alias_name,
- alias_label: identifiers[i + 50].alias_label,
- }),
- ],
- first_name: `Test-${i + 50}`,
- last_name: 'User',
- }),
- ),
+ mockedHandleHttpRequest.mockImplementationOnce(() =>
+ Promise.resolve({
+ httpResponse: Promise.resolve({}),
+ processedResponse: {
+ status: 200,
+ response: {
+ users: Array.from({ length: 50 }, (_, i) =>
+ removeUndefinedAndNullAndEmptyValues({
+ external_id: identifiers[i + 50].external_id,
+ user_aliases: [
+ removeUndefinedAndNullValues({
+ alias_name: identifiers[i + 50].alias_name,
+ alias_label: identifiers[i + 50].alias_label,
+ }),
+ ],
+ first_name: `Test-${i + 50}`,
+ last_name: 'User',
+ }),
+ ),
+ },
},
- },
- }));
+ }),
+ );
- handleHttpRequest.mockImplementationOnce(() => ({
- processedResponse: {
- status: 200,
- response: {
- users: Array.from({ length: 10 }, (_, i) =>
- removeUndefinedAndNullAndEmptyValues({
- external_id: identifiers[i + 100].external_id,
- user_aliases: [
- removeUndefinedAndNullValues({
- alias_name: identifiers[i + 100].alias_name,
- alias_label: identifiers[i + 100].alias_label,
- }),
- ],
- first_name: `Test-${i + 100}`,
- last_name: 'User',
- }),
- ),
+ mockedHandleHttpRequest.mockImplementationOnce(() =>
+ Promise.resolve({
+ httpResponse: Promise.resolve({}),
+ processedResponse: {
+ status: 200,
+ response: {
+ users: Array.from({ length: 10 }, (_, i) =>
+ removeUndefinedAndNullAndEmptyValues({
+ external_id: identifiers[i + 100].external_id,
+ user_aliases: [
+ removeUndefinedAndNullValues({
+ alias_name: identifiers[i + 100].alias_name,
+ alias_label: identifiers[i + 100].alias_label,
+ }),
+ ],
+ first_name: `Test-${i + 100}`,
+ last_name: 'User',
+ }),
+ ),
+ },
},
- },
- }));
+ }),
+ );
const chunkedUserData = await BrazeDedupUtility.doApiLookup(identifierChunks, {
destination,
+ metadata: {},
});
// Each chunk now returns { users: [...], failedIdentifiers: [] }
// So we need to extract users from each chunk and flatten
@@ -448,7 +475,7 @@ describe('dedup utility tests', () => {
restApiKey: 'test_rest_api_key',
dataCenter: 'EU-01',
},
- };
+ } as unknown as BrazeDestination;
const chunks = [
[
{ external_id: 'user1' },
@@ -459,39 +486,45 @@ describe('dedup utility tests', () => {
];
// Success response for first chunk
- handleHttpRequest.mockImplementationOnce(() => ({
- processedResponse: {
- response: {
- users: [
- {
- external_id: 'user1',
- email: 'user1@example.com',
- },
- {
- alias_name: 'alias1',
- alias_label: 'rudder_id',
- email: 'alias1@example.com',
- },
- {
- external_id: 'user2',
- email: 'user2@example.com',
- },
- ],
+ mockedHandleHttpRequest.mockImplementationOnce(() =>
+ Promise.resolve({
+ httpResponse: Promise.resolve({}),
+ processedResponse: {
+ response: {
+ users: [
+ {
+ external_id: 'user1',
+ email: 'user1@example.com',
+ },
+ {
+ alias_name: 'alias1',
+ alias_label: 'rudder_id',
+ email: 'alias1@example.com',
+ },
+ {
+ external_id: 'user2',
+ email: 'user2@example.com',
+ },
+ ],
+ },
+ status: 200,
},
- status: 200,
- },
- }));
+ }),
+ );
// Failure response for second chunk
- handleHttpRequest.mockImplementationOnce(() => ({
- processedResponse: {
- response: {
- error: 'Failed to fetch users',
+ mockedHandleHttpRequest.mockImplementationOnce(() =>
+ Promise.resolve({
+ httpResponse: Promise.resolve({}),
+ processedResponse: {
+ response: {
+ error: 'Failed to fetch users',
+ },
+ status: 500,
},
- status: 500,
- },
- }));
+ }),
+ );
- const users = await BrazeDedupUtility.doApiLookup(chunks, { destination });
+ const users = await BrazeDedupUtility.doApiLookup(chunks, { destination, metadata: {} });
expect(handleHttpRequest).toHaveBeenCalledTimes(2);
// Assert that the first chunk was successful and the second failed
@@ -553,7 +586,7 @@ describe('dedup utility tests', () => {
{ destination: { Config: { restApiKey: 'xyz' } }, message: { user_id: '456' } },
{ destination: { Config: { restApiKey: 'xyz' } }, message: { anonymousId: 'alias1' } },
{ destination: { Config: { restApiKey: 'xyz' } }, message: { anonymousId: 'alias2' } },
- ];
+ ] as BrazeRouterRequest[];
// call doLookup and verify the output
const result = await BrazeDedupUtility.doLookup(inputs);
@@ -715,23 +748,23 @@ describe('dedup utility tests', () => {
});
test('deduplicates user data correctly', () => {
- const userData = {
+ const userData: BrazeUserAttributes = {
external_id: '123',
color: 'green',
age: 30,
- gender: 'male',
+ gender: 'M',
country: 'US',
language: 'en',
- email_subscribe: true,
- push_subscribe: false,
+ email_subscribe: 'subscribed',
+ push_subscribe: 'unsubscribed',
subscription_groups: ['group1', 'group2'],
};
const storeData = {
external_id: '123',
country: 'US',
language: 'en',
- email_subscribe: true,
- push_subscribe: false,
+ email_subscribe: 'subscribed',
+ push_subscribe: 'unsubscribed',
subscription_groups: ['group1', 'group2'],
custom_attributes: {
color: 'blue',
@@ -745,32 +778,32 @@ describe('dedup utility tests', () => {
external_id: '123',
color: 'green',
age: 30,
- gender: 'male',
+ gender: 'M',
country: 'US',
language: 'en',
- email_subscribe: true,
- push_subscribe: false,
+ email_subscribe: 'subscribed',
+ push_subscribe: 'unsubscribed',
subscription_groups: ['group1', 'group2'],
});
});
test('deduplicates user data correctly 2', () => {
- const userData = {
+ const userData: BrazeUserAttributes = {
external_id: '123',
color: 'green',
age: 30,
- gender: 'male',
+ gender: 'M',
language: 'en',
- email_subscribe: true,
- push_subscribe: false,
+ email_subscribe: 'subscribed',
+ push_subscribe: 'unsubscribed',
subscription_groups: ['group1', 'group2'],
};
const storeData = {
external_id: '123',
country: 'US',
language: 'en',
- email_subscribe: true,
- push_subscribe: false,
+ email_subscribe: 'subscribed',
+ push_subscribe: 'unsubscribed',
subscription_groups: ['group1', 'group2'],
custom_attributes: {
color: 'blue',
@@ -784,10 +817,10 @@ describe('dedup utility tests', () => {
external_id: '123',
color: 'green',
age: 30,
- gender: 'male',
+ gender: 'M',
language: 'en',
- email_subscribe: true,
- push_subscribe: false,
+ email_subscribe: 'subscribed',
+ push_subscribe: 'unsubscribed',
subscription_groups: ['group1', 'group2'],
});
});
@@ -969,21 +1002,38 @@ describe('dedup utility tests', () => {
});
});
-describe('processBatch', () => {
+describe('processBatch for workspaces on non MAU plan', () => {
test('processBatch handles more than 75 attributes, events, purchases, subscription_groups and merge users', () => {
// Create input data with more than 75 attributes, events, and purchases
- const transformedEvents = [];
+ const transformedEvents: BrazeTransformedEvent[] = [];
for (let i = 0; i < 100; i++) {
transformedEvents.push({
destination: {
+ ID: 'braze',
+ Name: 'braze',
+ Enabled: true,
Config: {
restApiKey: 'restApiKey',
dataCenter: 'US-03',
enableSubscriptionGroupInGroupCall: true,
},
+ DestinationDefinition: {
+ ID: 'braze',
+ Name: 'braze',
+ DisplayName: '',
+ Config: {},
+ },
+ WorkspaceID: '123',
+ Transformations: [],
},
statusCode: 200,
batchedRequest: {
+ version: '1',
+ type: 'REST',
+ method: 'POST',
+ endpoint: '',
+ headers: {},
+ params: {},
body: {
JSON: {
attributes: [{ id: i, name: 'test', xyz: 'abc' }],
@@ -996,7 +1046,7 @@ describe('processBatch', () => {
},
},
},
- metadata: [{ job_id: i }],
+ metadata: [{ job_id: i, workspaceId: 'workspace-non-mau' }],
});
}
@@ -1005,89 +1055,556 @@ describe('processBatch', () => {
// Assert that the response is as expected
expect(result.length).toBe(1); // One successful batched request and one failure response
- expect(result[0].batchedRequest.length).toBe(8); // Two batched requests
- expect(result[0].batchedRequest[0].body.JSON.partner).toBe('RudderStack'); // Verify partner name
- expect(result[0].batchedRequest[0].body.JSON.attributes.length).toBe(75); // First batch contains 75 attributes
- expect(result[0].batchedRequest[0].body.JSON.events.length).toBe(75); // First batch contains 75 events
- expect(result[0].batchedRequest[0].body.JSON.purchases.length).toBe(75); // First batch contains 75 purchases
- expect(result[0].batchedRequest[1].body.JSON.partner).toBe('RudderStack'); // Verify partner name
- expect(result[0].batchedRequest[1].body.JSON.attributes.length).toBe(25); // Second batch contains remaining 25 attributes
- expect(result[0].batchedRequest[1].body.JSON.events.length).toBe(25); // Second batch contains remaining 25 events
- expect(result[0].batchedRequest[1].body.JSON.purchases.length).toBe(25); // Second batch contains remaining 25 purchases
- expect(result[0].batchedRequest[2].body.JSON.subscription_groups.length).toBe(25); // First batch contains 25 subscription group
- expect(result[0].batchedRequest[3].body.JSON.subscription_groups.length).toBe(25); // Second batch contains 25 subscription group
- expect(result[0].batchedRequest[4].body.JSON.subscription_groups.length).toBe(25); // Third batch contains 25 subscription group
- expect(result[0].batchedRequest[5].body.JSON.subscription_groups.length).toBe(25); // Fourth batch contains 25 subscription group
- expect(result[0].batchedRequest[6].body.JSON.merge_updates.length).toBe(50); // First batch contains 50 merge_updates
- expect(result[0].batchedRequest[7].body.JSON.merge_updates.length).toBe(50); // First batch contains 25 merge_updates
+ const firstResult = result[0];
+
+ // Ensure batchedRequest exists and is an array
+ expect(firstResult.batchedRequest).toBeDefined();
+ expect(Array.isArray(firstResult.batchedRequest)).toBe(true);
+
+ if (firstResult.batchedRequest && Array.isArray(firstResult.batchedRequest)) {
+ expect(firstResult.batchedRequest.length).toBe(8); // Eight batched requests
+ expect((firstResult.batchedRequest[0].body.JSON as BrazeTrackRequestBody).partner).toBe(
+ 'RudderStack',
+ ); // Verify partner name
+ expect(
+ (firstResult.batchedRequest[0].body.JSON as BrazeTrackRequestBody).attributes?.length,
+ ).toBe(75); // First batch contains 75 attributes
+ expect(
+ (firstResult.batchedRequest[0].body.JSON as BrazeTrackRequestBody).events?.length,
+ ).toBe(75); // First batch contains 75 events
+ expect(
+ (firstResult.batchedRequest[0].body.JSON as BrazeTrackRequestBody).purchases?.length,
+ ).toBe(75); // First batch contains 75 purchases
+ expect((firstResult.batchedRequest[1].body.JSON as BrazeTrackRequestBody).partner).toBe(
+ 'RudderStack',
+ ); // Verify partner name
+ expect(
+ (firstResult.batchedRequest[1].body.JSON as BrazeTrackRequestBody).attributes?.length,
+ ).toBe(25); // Second batch contains remaining 25 attributes
+ expect(
+ (firstResult.batchedRequest[1].body.JSON as BrazeTrackRequestBody).events?.length,
+ ).toBe(25); // Second batch contains remaining 25 events
+ expect(
+ (firstResult.batchedRequest[1].body.JSON as BrazeTrackRequestBody).purchases?.length,
+ ).toBe(25); // Second batch contains remaining 25 purchases
+ expect(
+ (firstResult.batchedRequest[2].body.JSON as BrazeSubscriptionBatchPayload)
+ .subscription_groups?.length,
+ ).toBe(25); // First batch contains 25 subscription group
+ expect(
+ (firstResult.batchedRequest[3].body.JSON as BrazeSubscriptionBatchPayload)
+ .subscription_groups?.length,
+ ).toBe(25); // Second batch contains 25 subscription group
+ expect(
+ (firstResult.batchedRequest[4].body.JSON as BrazeSubscriptionBatchPayload)
+ .subscription_groups?.length,
+ ).toBe(25); // Third batch contains 25 subscription group
+ expect(
+ (firstResult.batchedRequest[5].body.JSON as BrazeSubscriptionBatchPayload)
+ .subscription_groups?.length,
+ ).toBe(25); // Fourth batch contains 25 subscription group
+ expect(
+ (firstResult.batchedRequest[6].body.JSON as BrazeMergeBatchPayload).merge_updates?.length,
+ ).toBe(50); // First batch contains 50 merge_updates
+ expect(
+ (firstResult.batchedRequest[7].body.JSON as BrazeMergeBatchPayload).merge_updates?.length,
+ ).toBe(50); // Second batch contains remaining 50 merge_updates
+ }
});
test('processBatch handles more than 75 attributes, events, and purchases with non uniform distribution', () => {
+ const destination: BrazeDestination = {
+ ID: 'braze',
+ Name: 'braze',
+ Enabled: true,
+ Config: {
+ restApiKey: 'restApiKey',
+ dataCenter: 'eu',
+ },
+ DestinationDefinition: {
+ ID: 'braze',
+ Name: 'braze',
+ DisplayName: '',
+ Config: {},
+ },
+ WorkspaceID: '123',
+ Transformations: [],
+ };
// Create input data with more than 75 attributes, events, and purchases
- const transformedEventsSet1 = new Array(120).fill(0).map((_, i) => ({
- destination: {
- Config: {
- restApiKey: 'restApiKey',
- dataCenter: 'eu',
+ const transformedEventsSet1: BrazeTransformedEvent[] = new Array(120).fill(0).map((_, i) => ({
+ destination,
+ statusCode: 200,
+ batchedRequest: {
+ version: '1',
+ type: 'REST',
+ method: 'POST',
+ endpoint: '',
+ headers: {},
+ params: {},
+ body: {
+ JSON: {
+ events: [{ id: i, event: 'test', xyz: 'abc' }],
+ },
},
},
+ metadata: [{ job_id: i, workspaceId: 'workspace-non-mau' }],
+ }));
+
+ const transformedEventsSet2: BrazeTransformedEvent[] = new Array(160).fill(0).map((_, i) => ({
+ destination,
statusCode: 200,
batchedRequest: {
+ version: '1',
+ type: 'REST',
+ method: 'POST',
+ endpoint: '',
+ headers: {},
+ params: {},
body: {
JSON: {
- events: [{ id: i, event: 'test', xyz: 'abc' }],
+ purchases: [{ id: i, name: 'test', xyz: 'abc' }],
},
},
},
- metadata: [{ job_id: i }],
+ metadata: [{ job_id: 120 + i, workspaceId: 'workspace-non-mau' }],
}));
- const transformedEventsSet2 = new Array(160).fill(0).map((_, i) => ({
- destination: {
- Config: {
- restApiKey: 'restApiKey',
- dataCenter: 'eu',
+ const transformedEventsSet3: BrazeTransformedEvent[] = new Array(100).fill(0).map((_, i) => ({
+ destination,
+ statusCode: 200,
+ batchedRequest: {
+ version: '1',
+ type: 'REST',
+ method: 'POST',
+ endpoint: '',
+ headers: {},
+ params: {},
+ body: {
+ JSON: {
+ attributes: [{ id: i, name: 'test', xyz: 'abc' }],
+ },
},
},
+ metadata: [{ job_id: 280 + i, workspaceId: 'workspace-non-mau' }],
+ }));
+
+ const transformedEventsSet4: BrazeTransformedEvent[] = new Array(70).fill(0).map((_, i) => ({
+ destination,
statusCode: 200,
batchedRequest: {
+ version: '1',
+ type: 'REST',
+ method: 'POST',
+ endpoint: '',
+ headers: {},
+ params: {},
body: {
JSON: {
- purchases: [{ id: i, name: 'test', xyz: 'abc' }],
+ subscription_groups: [
+ { subscription_group_id: i, group: 'test', subscription_state: 'abc' },
+ ],
},
},
},
- metadata: [{ job_id: 120 + i }],
+ metadata: [{ job_id: 280 + i, workspaceId: 'workspace-non-mau' }],
}));
- const transformedEventsSet3 = new Array(100).fill(0).map((_, i) => ({
- destination: {
- Config: {
- restApiKey: 'restApiKey',
- dataCenter: 'eu',
+ const transformedEventsSet5: BrazeTransformedEvent[] = new Array(40).fill(0).map((_, i) => ({
+ destination,
+ statusCode: 200,
+ batchedRequest: {
+ version: '1',
+ type: 'REST',
+ method: 'POST',
+ endpoint: '',
+ headers: {},
+ params: {},
+ body: {
+ JSON: {
+ merge_updates: [{ id: i, alias: 'test', xyz: 'abc' }],
+ },
},
},
+ metadata: [{ job_id: 280 + i, workspaceId: 'workspace-non-mau' }],
+ }));
+
+ // Call the processBatch function
+ const result = processBatch([
+ ...transformedEventsSet1,
+ ...transformedEventsSet2,
+ ...transformedEventsSet3,
+ ...transformedEventsSet4,
+ ...transformedEventsSet5,
+ ]);
+
+ // Assert that the response is as expected
+ expect(result.length).toBe(1); // One successful batched request (all inputs succeed)
+ const firstResult = result[0];
+
+ // Ensure batchedRequest exists, is an array, and metadata exists
+ expect(firstResult.batchedRequest).toBeDefined();
+ expect(Array.isArray(firstResult.batchedRequest)).toBe(true);
+ expect(firstResult.metadata).toBeDefined();
+
+ if (
+ firstResult.batchedRequest &&
+ Array.isArray(firstResult.batchedRequest) &&
+ firstResult.metadata
+ ) {
+ expect(firstResult.metadata.length).toBe(490); // Check the total length is same as input jobs (120 + 160 + 100 + 70 + 40)
+ expect(firstResult.batchedRequest.length).toBe(7); // Seven batched requests
+ expect((firstResult.batchedRequest[0].body.JSON as BrazeTrackRequestBody).partner).toBe(
+ 'RudderStack',
+ ); // Verify partner name
+ expect(
+ (firstResult.batchedRequest[0].body.JSON as BrazeTrackRequestBody).attributes?.length,
+ ).toBe(75); // First batch contains 75 attributes
+ expect(
+ (firstResult.batchedRequest[0].body.JSON as BrazeTrackRequestBody).events?.length,
+ ).toBe(75); // First batch contains 75 events
+ expect(
+ (firstResult.batchedRequest[0].body.JSON as BrazeTrackRequestBody).purchases?.length,
+ ).toBe(75); // First batch contains 75 purchases
+ expect((firstResult.batchedRequest[1].body.JSON as BrazeTrackRequestBody).partner).toBe(
+ 'RudderStack',
+ ); // Verify partner name
+ expect(
+ (firstResult.batchedRequest[1].body.JSON as BrazeTrackRequestBody).attributes?.length,
+ ).toBe(25); // Second batch contains remaining 25 attributes
+ expect(
+ (firstResult.batchedRequest[1].body.JSON as BrazeTrackRequestBody).events?.length,
+ ).toBe(45); // Second batch contains remaining 45 events
+ expect(
+ (firstResult.batchedRequest[1].body.JSON as BrazeTrackRequestBody).purchases?.length,
+ ).toBe(75); // Second batch contains remaining 75 purchases
+ expect(
+ (firstResult.batchedRequest[2].body.JSON as BrazeTrackRequestBody).purchases?.length,
+ ).toBe(10); // Third batch contains remaining 10 purchases
+ expect(
+ (firstResult.batchedRequest[3].body.JSON as BrazeSubscriptionBatchPayload)
+ .subscription_groups?.length,
+ ).toBe(25); // First batch contains 25 subscription group
+ expect(
+ (firstResult.batchedRequest[4].body.JSON as BrazeSubscriptionBatchPayload)
+ .subscription_groups?.length,
+ ).toBe(25); // Second batch contains 25 subscription group
+ expect(
+ (firstResult.batchedRequest[5].body.JSON as BrazeSubscriptionBatchPayload)
+ .subscription_groups?.length,
+ ).toBe(20); // Third batch contains 20 subscription group
+ expect(
+ (firstResult.batchedRequest[6].body.JSON as BrazeMergeBatchPayload).merge_updates?.length,
+ ).toBe(40); // Single batch contains all 40 merge_updates
+ }
+ });
+
+ test('check success and failure scenarios both for processBatch', () => {
+ const transformedEvents: BrazeTransformedEvent[] = [];
+ const destination: BrazeDestination = {
+ ID: 'braze',
+ Name: 'braze',
+ Enabled: true,
+ Config: {
+ restApiKey: 'restApiKey',
+ dataCenter: 'eu',
+ },
+ DestinationDefinition: {
+ ID: 'braze',
+ Name: 'braze',
+ DisplayName: '',
+ Config: {},
+ },
+ WorkspaceID: '123',
+ Transformations: [],
+ };
+ let successCount = 0;
+ let failureCount = 0;
+ for (let i = 0; i < 100; i++) {
+ const rando = Math.random() * 100;
+ if (rando < 50) {
+ transformedEvents.push({
+ destination,
+ statusCode: 200,
+ batchedRequest: {
+ version: '1',
+ type: 'REST',
+ method: 'POST',
+ endpoint: '',
+ headers: {},
+ params: {},
+ body: {
+ JSON: {
+ attributes: [{ id: i, name: 'test', xyz: 'abc' }],
+ events: [{ id: i, event: 'test', xyz: 'abc' }],
+ purchases: [{ id: i, purchase: 'test', xyz: 'abc' }],
+ },
+ },
+ },
+ metadata: [{ job_id: i, workspaceId: 'workspace-non-mau' }],
+ });
+ successCount = successCount + 1;
+ } else {
+ transformedEvents.push({
+ destination,
+ statusCode: 400,
+ metadata: [{ job_id: i, workspaceId: 'workspace-non-mau' }],
+ error: 'Random Error',
+ });
+ failureCount = failureCount + 1;
+ }
+ }
+ // Call the processBatch function
+ const result = processBatch(transformedEvents);
+ expect(result.length).toBe(failureCount + 1);
+ const firstResult = result[0];
+
+ // Ensure batchedRequest exists, is an array, and metadata exists
+ expect(firstResult.batchedRequest).toBeDefined();
+ expect(Array.isArray(firstResult.batchedRequest)).toBe(true);
+ expect(firstResult.metadata).toBeDefined();
+
+ if (
+ firstResult.batchedRequest &&
+ Array.isArray(firstResult.batchedRequest) &&
+ firstResult.metadata
+ ) {
+ expect(
+ (firstResult.batchedRequest[0].body.JSON as BrazeTrackRequestBody).attributes?.length,
+ ).toBe(successCount);
+ expect(
+ (firstResult.batchedRequest[0].body.JSON as BrazeTrackRequestBody).events?.length,
+ ).toBe(successCount);
+ expect(
+ (firstResult.batchedRequest[0].body.JSON as BrazeTrackRequestBody).purchases?.length,
+ ).toBe(successCount);
+ expect((firstResult.batchedRequest[0].body.JSON as BrazeTrackRequestBody).partner).toBe(
+ 'RudderStack',
+ );
+ expect(firstResult.metadata.length).toBe(successCount);
+ }
+ });
+});
+
+describe('processBatch for workspaces on MAU plan', () => {
+ test('processBatch handles more than 75 attributes, events, purchases, subscription_groups and merge users', () => {
+ // Create input data with more than 75 attributes, events, and purchases
+ const transformedEvents: BrazeTransformedEvent[] = [];
+ for (let i = 0; i < 100; i++) {
+ transformedEvents.push({
+ destination: {
+ ID: 'braze',
+ Name: 'braze',
+ Enabled: true,
+ DestinationDefinition: {
+ ID: 'braze',
+ Name: 'braze',
+ DisplayName: '',
+ Config: {},
+ },
+ WorkspaceID: '123',
+ Transformations: [],
+ Config: {
+ restApiKey: 'restApiKey',
+ dataCenter: 'US-03',
+ enableSubscriptionGroupInGroupCall: true,
+ },
+ },
+ statusCode: 200,
+ batchedRequest: {
+ version: '1',
+ type: 'REST',
+ method: 'POST',
+ endpoint: '',
+ headers: {},
+ params: {},
+ body: {
+ JSON: {
+ attributes: [{ id: i, name: 'test', xyz: 'abc' }],
+ events: [{ id: i, event: 'test', xyz: 'abc' }],
+ purchases: [{ id: i, purchase: 'test', xyz: 'abc' }],
+ subscription_groups: [
+ { subscription_group_id: i, group: 'test', subscription_state: 'abc' },
+ ],
+ merge_updates: [{ id: i, alias: 'test', xyz: 'abc' }],
+ },
+ },
+ },
+ metadata: [{ job_id: i, workspaceId: 'workspace-mau' }],
+ });
+ }
+
+ // Call the processBatch function
+ const result = processBatch(transformedEvents);
+ expect(result.length).toBe(1);
+ const firstResult = result[0];
+
+ // Ensure batchedRequest exists, is an array, and metadata exists
+ expect(firstResult.batchedRequest).toBeDefined();
+ expect(Array.isArray(firstResult.batchedRequest)).toBe(true);
+ expect(firstResult.metadata).toBeDefined();
+
+ if (
+ firstResult.batchedRequest &&
+ Array.isArray(firstResult.batchedRequest) &&
+ firstResult.metadata
+ ) {
+ expect(firstResult.batchedRequest.length).toBe(10);
+ // First batch contains 75 attributes
+ expect((firstResult.batchedRequest[0].body.JSON as BrazeTrackRequestBody).partner).toBe(
+ 'RudderStack',
+ );
+ expect(
+ (firstResult.batchedRequest[0].body.JSON as BrazeTrackRequestBody).attributes?.length,
+ ).toBe(75);
+
+ // Second batch contains 25 attributes and 50 events
+ expect((firstResult.batchedRequest[1].body.JSON as BrazeTrackRequestBody).partner).toBe(
+ 'RudderStack',
+ ); // Verify partner name
+ expect(
+ (firstResult.batchedRequest[1].body.JSON as BrazeTrackRequestBody).attributes?.length,
+ ).toBe(25);
+ expect(
+ (firstResult.batchedRequest[1].body.JSON as BrazeTrackRequestBody).events?.length,
+ ).toBe(50);
+
+ // Third batch contains 50 events and 25 purchases
+ expect((firstResult.batchedRequest[2].body.JSON as BrazeTrackRequestBody).partner).toBe(
+ 'RudderStack',
+ ); // Verify partner name
+ expect(
+ (firstResult.batchedRequest[2].body.JSON as BrazeTrackRequestBody).events?.length,
+ ).toBe(50);
+ expect(
+ (firstResult.batchedRequest[2].body.JSON as BrazeTrackRequestBody).purchases?.length,
+ ).toBe(25);
+
+ // Fourth batch contains 75 purchases
+ expect((firstResult.batchedRequest[3].body.JSON as BrazeTrackRequestBody).partner).toBe(
+ 'RudderStack',
+ ); // Verify partner name
+ expect(
+ (firstResult.batchedRequest[3].body.JSON as BrazeTrackRequestBody).purchases?.length,
+ ).toBe(75);
+
+ // Fifth batch contains 25 subscription groups
+ expect(
+ (firstResult.batchedRequest[4].body.JSON as BrazeSubscriptionBatchPayload)
+ .subscription_groups?.length,
+ ).toBe(25);
+ // Sixth batch contains 25 subscription groups
+ expect(
+ (firstResult.batchedRequest[5].body.JSON as BrazeSubscriptionBatchPayload)
+ .subscription_groups?.length,
+ ).toBe(25);
+ // Seventh batch contains 25 subscription groups
+ expect(
+ (firstResult.batchedRequest[6].body.JSON as BrazeSubscriptionBatchPayload)
+ .subscription_groups?.length,
+ ).toBe(25);
+ // Eighth batch contains 25 subscription groups
+ expect(
+ (firstResult.batchedRequest[7].body.JSON as BrazeSubscriptionBatchPayload)
+ .subscription_groups?.length,
+ ).toBe(25);
+
+ // Ninth batch contains 50 merge_updates
+ expect(
+ (firstResult.batchedRequest[8].body.JSON as BrazeMergeBatchPayload).merge_updates?.length,
+ ).toBe(50);
+ // Tenth batch contains 50 merge_updates
+ expect(
+ (firstResult.batchedRequest[9].body.JSON as BrazeMergeBatchPayload).merge_updates?.length,
+ ).toBe(50);
+ }
+ });
+
+ test('processBatch handles more than 75 attributes, events, and purchases with non uniform distribution', () => {
+ const destination: BrazeDestination = {
+ ID: 'braze',
+ Name: 'braze',
+ Enabled: true,
+ Config: {
+ restApiKey: 'restApiKey',
+ dataCenter: 'eu',
+ },
+ DestinationDefinition: {
+ ID: 'braze',
+ Name: 'braze',
+ DisplayName: '',
+ Config: {},
+ },
+ WorkspaceID: '123',
+ Transformations: [],
+ };
+ // Create input data with more than 75 attributes, events, and purchases
+ const transformedEventsSet1: BrazeTransformedEvent[] = new Array(120).fill(0).map((_, i) => ({
+ destination,
statusCode: 200,
batchedRequest: {
+ version: '1',
+ type: 'REST',
+ method: 'POST',
+ endpoint: '',
+ headers: {},
+ params: {},
body: {
JSON: {
- attributes: [{ id: i, name: 'test', xyz: 'abc' }],
+ events: [{ id: i, event: 'test', xyz: 'abc' }],
},
},
},
- metadata: [{ job_id: 280 + i }],
+ metadata: [{ job_id: i, workspaceId: 'workspace-mau' }],
}));
- const transformedEventsSet4 = new Array(70).fill(0).map((_, i) => ({
- destination: {
- Config: {
- restApiKey: 'restApiKey',
- dataCenter: 'eu',
- enableSubscriptionGroupInGroupCall: true,
+ const transformedEventsSet2: BrazeTransformedEvent[] = new Array(160).fill(0).map((_, i) => ({
+ destination,
+ statusCode: 200,
+ batchedRequest: {
+ version: '1',
+ type: 'REST',
+ method: 'POST',
+ endpoint: '',
+ headers: {},
+ params: {},
+ body: {
+ JSON: {
+ purchases: [{ id: i, name: 'test', xyz: 'abc' }],
+ },
},
},
+ metadata: [{ job_id: 120 + i, workspaceId: 'workspace-mau' }],
+ }));
+
+ const transformedEventsSet3: BrazeTransformedEvent[] = new Array(100).fill(0).map((_, i) => ({
+ destination,
+ statusCode: 200,
+ batchedRequest: {
+ version: '1',
+ type: 'REST',
+ method: 'POST',
+ endpoint: '',
+ headers: {},
+ params: {},
+ body: {
+ JSON: {
+ attributes: [{ id: i, name: 'test', xyz: 'abc' }],
+ },
+ },
+ },
+ metadata: [{ job_id: 280 + i, workspaceId: 'workspace-mau' }],
+ }));
+
+ const transformedEventsSet4: BrazeTransformedEvent[] = new Array(70).fill(0).map((_, i) => ({
+ destination,
statusCode: 200,
batchedRequest: {
+ version: '1',
+ type: 'REST',
+ method: 'POST',
+ endpoint: '',
+ headers: {},
+ params: {},
body: {
JSON: {
subscription_groups: [
@@ -1096,26 +1613,26 @@ describe('processBatch', () => {
},
},
},
- metadata: [{ job_id: 280 + i }],
+ metadata: [{ job_id: 280 + i, workspaceId: 'workspace-mau' }],
}));
- const transformedEventsSet5 = new Array(40).fill(0).map((_, i) => ({
- destination: {
- Config: {
- restApiKey: 'restApiKey',
- dataCenter: 'eu',
- enableSubscriptionGroupInGroupCall: true,
- },
- },
+ const transformedEventsSet5: BrazeTransformedEvent[] = new Array(40).fill(0).map((_, i) => ({
+ destination,
statusCode: 200,
batchedRequest: {
+ version: '1',
+ type: 'REST',
+ method: 'POST',
+ endpoint: '',
+ headers: {},
+ params: {},
body: {
JSON: {
merge_updates: [{ id: i, alias: 'test', xyz: 'abc' }],
},
},
},
- metadata: [{ job_id: 280 + i }],
+ metadata: [{ job_id: 280 + i, workspaceId: 'workspace-mau' }],
}));
// Call the processBatch function
@@ -1128,40 +1645,131 @@ describe('processBatch', () => {
]);
// Assert that the response is as expected
- expect(result.length).toBe(1); // One successful batched request and one failure response
- expect(result[0].metadata.length).toBe(490); // Check the total length is same as input jobs (120 + 160 + 100 + 70 +40)
- expect(result[0].batchedRequest.length).toBe(7); // Two batched requests
- expect(result[0].batchedRequest[0].body.JSON.partner).toBe('RudderStack'); // Verify partner name
- expect(result[0].batchedRequest[0].body.JSON.attributes.length).toBe(75); // First batch contains 75 attributes
- expect(result[0].batchedRequest[0].body.JSON.events.length).toBe(75); // First batch contains 75 events
- expect(result[0].batchedRequest[0].body.JSON.purchases.length).toBe(75); // First batch contains 75 purchases
- expect(result[0].batchedRequest[1].body.JSON.partner).toBe('RudderStack'); // Verify partner name
- expect(result[0].batchedRequest[1].body.JSON.attributes.length).toBe(25); // Second batch contains remaining 25 attributes
- expect(result[0].batchedRequest[1].body.JSON.events.length).toBe(45); // Second batch contains remaining 45 events
- expect(result[0].batchedRequest[1].body.JSON.purchases.length).toBe(75); // Second batch contains remaining 75 purchases
- expect(result[0].batchedRequest[2].body.JSON.purchases.length).toBe(10); // Third batch contains remaining 10 purchases
- expect(result[0].batchedRequest[3].body.JSON.subscription_groups.length).toBe(25); // First batch contains 25 subscription group
- expect(result[0].batchedRequest[4].body.JSON.subscription_groups.length).toBe(25); // Second batch contains 25 subscription group
- expect(result[0].batchedRequest[5].body.JSON.subscription_groups.length).toBe(20); // Third batch contains 20 subscription group
- expect(result[0].batchedRequest[6].body.JSON.merge_updates.length).toBe(40); // First batch contains 50 merge_updates
+ expect(result.length).toBe(1); // One successful batched response
+
+ const firstResult = result[0];
+ // Ensure batchedRequest exists, is an array, and metadata exists
+ expect(firstResult.batchedRequest).toBeDefined();
+ expect(Array.isArray(firstResult.batchedRequest)).toBe(true);
+ expect(firstResult.metadata).toBeDefined();
+
+ if (
+ firstResult.batchedRequest &&
+ Array.isArray(firstResult.batchedRequest) &&
+ firstResult.metadata
+ ) {
+ expect(firstResult.metadata.length).toBe(490); // Total metadata count: 120 events + 160 purchases + 100 attributes + 70 subscription_groups + 40 merge_updates
+ expect(firstResult.batchedRequest.length).toBe(10); // 10 batched requests total (6 track API batches + 3 subscription batches + 1 merge batch)
+
+ // Track API Batch 1: First 75 attributes (out of 100 total)
+ expect((firstResult.batchedRequest[0].body.JSON as BrazeTrackRequestBody).partner).toBe(
+ 'RudderStack',
+ );
+ expect(
+ (firstResult.batchedRequest[0].body.JSON as BrazeTrackRequestBody).attributes?.length,
+ ).toBe(75);
+
+ // Track API Batch 2: Remaining 25 attributes + 50 events (out of 120 total)
+ expect((firstResult.batchedRequest[1].body.JSON as BrazeTrackRequestBody).partner).toBe(
+ 'RudderStack',
+ );
+ expect(
+ (firstResult.batchedRequest[1].body.JSON as BrazeTrackRequestBody).attributes?.length,
+ ).toBe(25);
+ expect(
+ (firstResult.batchedRequest[1].body.JSON as BrazeTrackRequestBody).events?.length,
+ ).toBe(50);
+
+ // Track API Batch 3: Remaining 70 events + 5 purchases (out of 160 total)
+ expect((firstResult.batchedRequest[2].body.JSON as BrazeTrackRequestBody).partner).toBe(
+ 'RudderStack',
+ );
+ expect(
+ (firstResult.batchedRequest[2].body.JSON as BrazeTrackRequestBody).events?.length,
+ ).toBe(70);
+ expect(
+ (firstResult.batchedRequest[2].body.JSON as BrazeTrackRequestBody).purchases?.length,
+ ).toBe(5);
+
+ // Track API Batch 4: Next 75 purchases
+ expect((firstResult.batchedRequest[3].body.JSON as BrazeTrackRequestBody).partner).toBe(
+ 'RudderStack',
+ );
+ expect(
+ (firstResult.batchedRequest[3].body.JSON as BrazeTrackRequestBody).purchases?.length,
+ ).toBe(75);
+
+ // Track API Batch 5: Next 75 purchases
+ expect((firstResult.batchedRequest[4].body.JSON as BrazeTrackRequestBody).partner).toBe(
+ 'RudderStack',
+ );
+ expect(
+ (firstResult.batchedRequest[4].body.JSON as BrazeTrackRequestBody).purchases?.length,
+ ).toBe(75);
+
+ // Track API Batch 6: Remaining 5 purchases
+ expect((firstResult.batchedRequest[5].body.JSON as BrazeTrackRequestBody).partner).toBe(
+ 'RudderStack',
+ );
+ expect(
+ (firstResult.batchedRequest[5].body.JSON as BrazeTrackRequestBody).purchases?.length,
+ ).toBe(5);
+
+ // Subscription Groups Batches: 70 total subscription_groups chunked by 25
+ expect(
+ (firstResult.batchedRequest[6].body.JSON as BrazeSubscriptionBatchPayload)
+ .subscription_groups?.length,
+ ).toBe(25); // First 25
+ expect(
+ (firstResult.batchedRequest[7].body.JSON as BrazeSubscriptionBatchPayload)
+ .subscription_groups?.length,
+ ).toBe(25); // Next 25
+ expect(
+ (firstResult.batchedRequest[8].body.JSON as BrazeSubscriptionBatchPayload)
+ .subscription_groups?.length,
+ ).toBe(20); // Remaining 20
+
+ // Merge Updates Batch: 40 total merge_updates in single batch
+ expect(
+ (firstResult.batchedRequest[9].body.JSON as BrazeMergeBatchPayload).merge_updates?.length,
+ ).toBe(40);
+ }
});
test('check success and failure scenarios both for processBatch', () => {
- const transformedEvents = [];
+ const transformedEvents: BrazeTransformedEvent[] = [];
let successCount = 0;
let failureCount = 0;
+ const destination: BrazeDestination = {
+ ID: 'braze',
+ Name: 'braze',
+ Enabled: true,
+ Config: {
+ restApiKey: 'restApiKey',
+ dataCenter: 'eu',
+ },
+ DestinationDefinition: {
+ ID: 'braze',
+ Name: 'braze',
+ DisplayName: '',
+ Config: {},
+ },
+ WorkspaceID: '123',
+ Transformations: [],
+ };
for (let i = 0; i < 100; i++) {
const rando = Math.random() * 100;
if (rando < 50) {
transformedEvents.push({
- destination: {
- Config: {
- restApiKey: 'restApiKey',
- dataCenter: 'eu',
- },
- },
+ destination,
statusCode: 200,
batchedRequest: {
+ version: '1',
+ type: 'REST',
+ method: 'POST',
+ endpoint: '',
+ headers: {},
+ params: {},
body: {
JSON: {
attributes: [{ id: i, name: 'test', xyz: 'abc' }],
@@ -1170,19 +1778,14 @@ describe('processBatch', () => {
},
},
},
- metadata: [{ job_id: i }],
+ metadata: [{ job_id: i, workspaceId: 'workspace-mau' }],
});
successCount = successCount + 1;
} else {
transformedEvents.push({
- destination: {
- Config: {
- restApiKey: 'restApiKey',
- dataCenter: 'eu',
- },
- },
+ destination,
statusCode: 400,
- metadata: [{ job_id: i }],
+ metadata: [{ job_id: i, workspaceId: 'workspace-mau' }],
error: 'Random Error',
});
failureCount = failureCount + 1;
@@ -1191,11 +1794,23 @@ describe('processBatch', () => {
// Call the processBatch function
const result = processBatch(transformedEvents);
expect(result.length).toBe(failureCount + 1);
- expect(result[0].batchedRequest[0].body.JSON.attributes.length).toBe(successCount);
- expect(result[0].batchedRequest[0].body.JSON.events.length).toBe(successCount);
- expect(result[0].batchedRequest[0].body.JSON.purchases.length).toBe(successCount);
- expect(result[0].batchedRequest[0].body.JSON.partner).toBe('RudderStack');
- expect(result[0].metadata.length).toBe(successCount);
+ const firstResult = result[0];
+
+ // Ensure batchedRequest exists, is an array, and metadata exists
+ expect(firstResult.batchedRequest).toBeDefined();
+ expect(Array.isArray(firstResult.batchedRequest)).toBe(true);
+ expect(firstResult.metadata).toBeDefined();
+
+ if (
+ firstResult.batchedRequest &&
+ Array.isArray(firstResult.batchedRequest) &&
+ firstResult.metadata
+ ) {
+ expect((firstResult.batchedRequest[0].body.JSON as BrazeTrackRequestBody).partner).toBe(
+ 'RudderStack',
+ );
+ expect(firstResult.metadata.length).toBe(successCount);
+ }
});
});
@@ -1239,11 +1854,18 @@ describe('addAppId', () => {
describe('getPurchaseObjs', () => {
test('a single valid product with all required properties', () => {
- const purchaseObjs = getPurchaseObjs({
- properties: { products: [{ product_id: '123', price: 10.99, quantity: 2 }], currency: 'USD' },
- timestamp: '2023-08-04T12:34:56Z',
- anonymousId: 'abc',
- });
+ const purchaseObjs = getPurchaseObjs(
+ {
+ type: 'track',
+ properties: {
+ products: [{ product_id: '123', price: 10.99, quantity: 2 }],
+ currency: 'USD',
+ },
+ timestamp: '2023-08-04T12:34:56Z',
+ anonymousId: 'abc',
+ },
+ {} as unknown as BrazeDestinationConfig,
+ );
expect(purchaseObjs).toEqual([
{
product_id: '123',
@@ -1261,17 +1883,21 @@ describe('getPurchaseObjs', () => {
});
test('multiple valid products with all required properties', () => {
- const purchaseObjs = getPurchaseObjs({
- properties: {
- products: [
- { product_id: '123', price: 10.99, quantity: 2 },
- { product_id: '456', price: 5.49, quantity: 1 },
- ],
- currency: 'EUR',
+ const purchaseObjs = getPurchaseObjs(
+ {
+ type: 'track',
+ properties: {
+ products: [
+ { product_id: '123', price: 10.99, quantity: 2 },
+ { product_id: '456', price: 5.49, quantity: 1 },
+ ],
+ currency: 'EUR',
+ },
+ timestamp: '2023-08-04T12:34:56Z',
+ anonymousId: 'abc',
},
- timestamp: '2023-08-04T12:34:56Z',
- anonymousId: 'abc',
- });
+ {} as unknown as BrazeDestinationConfig,
+ );
expect(purchaseObjs).toEqual([
{
product_id: '123',
@@ -1302,12 +1928,16 @@ describe('getPurchaseObjs', () => {
test('single product with missing product_id property', () => {
try {
- getPurchaseObjs({
- properties: { products: [{ price: 10.99, quantity: 2 }], currency: 'USD' },
- timestamp: '2023-08-04T12:34:56Z',
- anonymousId: 'abc',
- });
- } catch (e) {
+ getPurchaseObjs(
+ {
+ type: 'track',
+ properties: { products: [{ price: 10.99, quantity: 2 }], currency: 'USD' },
+ timestamp: '2023-08-04T12:34:56Z',
+ anonymousId: 'abc',
+ },
+ {} as unknown as BrazeDestinationConfig,
+ );
+ } catch (e: any) {
expect(e.message).toEqual(
'Invalid Order Completed event: Product Id is missing for product at index: 0',
);
@@ -1316,12 +1946,16 @@ describe('getPurchaseObjs', () => {
test('single product with missing price property', () => {
try {
- getPurchaseObjs({
- properties: { products: [{ product_id: '123', quantity: 2 }], currency: 'USD' },
- timestamp: '2023-08-04T12:34:56Z',
- anonymousId: 'abc',
- });
- } catch (e) {
+ getPurchaseObjs(
+ {
+ type: 'track',
+ properties: { products: [{ product_id: '123', quantity: 2 }], currency: 'USD' },
+ timestamp: '2023-08-04T12:34:56Z',
+ anonymousId: 'abc',
+ },
+ {} as unknown as BrazeDestinationConfig,
+ );
+ } catch (e: any) {
expect(e.message).toEqual(
'Invalid Order Completed event: Price is missing for product at index: 0',
);
@@ -1330,12 +1964,16 @@ describe('getPurchaseObjs', () => {
test('single product with missing quantity property', () => {
try {
- getPurchaseObjs({
- properties: { products: [{ product_id: '123', price: 10.99 }], currency: 'USD' },
- timestamp: '2023-08-04T12:34:56Z',
- anonymousId: 'abc',
- });
- } catch (e) {
+ getPurchaseObjs(
+ {
+ type: 'track',
+ properties: { products: [{ product_id: '123', price: 10.99 }], currency: 'USD' },
+ timestamp: '2023-08-04T12:34:56Z',
+ anonymousId: 'abc',
+ },
+ {} as unknown as BrazeDestinationConfig,
+ );
+ } catch (e: any) {
expect(e.message).toEqual(
'Invalid Order Completed event: Quantity is missing for product at index: 0',
);
@@ -1344,12 +1982,16 @@ describe('getPurchaseObjs', () => {
test('single product with missing currency property', () => {
try {
- getPurchaseObjs({
- properties: { products: [{ product_id: '123', price: 10.99, quantity: 2 }] },
- timestamp: '2023-08-04T12:34:56Z',
- anonymousId: 'abc',
- });
- } catch (e) {
+ getPurchaseObjs(
+ {
+ type: 'track',
+ properties: { products: [{ product_id: '123', price: 10.99, quantity: 2 }] },
+ timestamp: '2023-08-04T12:34:56Z',
+ anonymousId: 'abc',
+ },
+ {} as unknown as BrazeDestinationConfig,
+ );
+ } catch (e: any) {
expect(e.message).toEqual(
'Invalid Order Completed event: Message properties and product at index: 0 is missing currency',
);
@@ -1358,14 +2000,18 @@ describe('getPurchaseObjs', () => {
test('single product with missing timestamp property', () => {
try {
- getPurchaseObjs({
- properties: {
- products: [{ product_id: '123', price: 10.99, quantity: 2 }],
- currency: 'USD',
+ getPurchaseObjs(
+ {
+ type: 'track',
+ properties: {
+ products: [{ product_id: '123', price: 10.99, quantity: 2 }],
+ currency: 'USD',
+ },
+ anonymousId: 'abc',
},
- anonymousId: 'abc',
- });
- } catch (e) {
+ {} as unknown as BrazeDestinationConfig,
+ );
+ } catch (e: any) {
expect(e.message).toEqual(
'Invalid Order Completed event: Timestamp is missing in the message',
);
@@ -1374,15 +2020,19 @@ describe('getPurchaseObjs', () => {
test('single product with NaN price', () => {
try {
- getPurchaseObjs({
- properties: {
- products: [{ product_id: '123', price: 'abc', quantity: 2 }],
- currency: 'USD',
- },
- timestamp: '2023-08-04T12:34:56Z',
- anonymousId: 'abc',
- });
- } catch (e) {
+ getPurchaseObjs(
+ {
+ type: 'track',
+ properties: {
+ products: [{ product_id: '123', price: 'abc', quantity: 2 }],
+ currency: 'USD',
+ } as unknown as RudderBrazeMessage,
+ timestamp: '2023-08-04T12:34:56Z',
+ anonymousId: 'abc',
+ },
+ {} as unknown as BrazeDestinationConfig,
+ );
+ } catch (e: any) {
expect(e.message).toEqual(
'Invalid Order Completed event: Price is not a number for product at index: 0',
);
@@ -1391,15 +2041,19 @@ describe('getPurchaseObjs', () => {
test('single product with NaN quantity', () => {
try {
- getPurchaseObjs({
- properties: {
- products: [{ product_id: '123', price: 10.99, quantity: 'abc' }],
- currency: 'USD',
- },
- timestamp: '2023-08-04T12:34:56Z',
- anonymousId: 'abc',
- });
- } catch (e) {
+ getPurchaseObjs(
+ {
+ type: 'track',
+ properties: {
+ products: [{ product_id: '123', price: 10.99, quantity: 'abc' }],
+ currency: 'USD',
+ },
+ timestamp: '2023-08-04T12:34:56Z',
+ anonymousId: 'abc',
+ } as unknown as RudderBrazeMessage,
+ {} as unknown as BrazeDestinationConfig,
+ );
+ } catch (e: any) {
expect(e.message).toEqual(
'Invalid Order Completed event: Quantity is not a number for product at index: 0',
);
@@ -1408,14 +2062,18 @@ describe('getPurchaseObjs', () => {
// Test case for a single product with valid currency property
test('single product with valid currency property', () => {
- const purchaseObjs = getPurchaseObjs({
- properties: {
- products: [{ product_id: '123', price: 10.99, quantity: 2 }],
- currency: 'USD',
+ const purchaseObjs = getPurchaseObjs(
+ {
+ type: 'track',
+ properties: {
+ products: [{ product_id: '123', price: 10.99, quantity: 2 }],
+ currency: 'USD',
+ },
+ timestamp: '2023-08-04T12:34:56Z',
+ anonymousId: 'abc',
},
- timestamp: '2023-08-04T12:34:56Z',
- anonymousId: 'abc',
- });
+ {} as unknown as BrazeDestinationConfig,
+ );
expect(purchaseObjs).toEqual([
{
product_id: '123',
@@ -1434,36 +2092,47 @@ describe('getPurchaseObjs', () => {
test('products not being an array', () => {
try {
- getPurchaseObjs({
- properties: { products: { product_id: '123', price: 10.99, quantity: 2 } },
- timestamp: '2023-08-04T12:34:56Z',
- anonymousId: 'abc',
- });
- } catch (e) {
+ getPurchaseObjs(
+ {
+ properties: { products: { product_id: '123', price: 10.99, quantity: 2 } },
+ timestamp: '2023-08-04T12:34:56Z',
+ anonymousId: 'abc',
+ } as unknown as RudderBrazeMessage,
+ {} as unknown as BrazeDestinationConfig,
+ );
+ } catch (e: any) {
expect(e.message).toEqual('Invalid Order Completed event: Products is not an array');
}
});
test('empty products array', () => {
try {
- getPurchaseObjs({
- properties: { products: [], currency: 'USD' },
- timestamp: '2023-08-04T12:34:56Z',
- anonymousId: 'abc',
- });
- } catch (e) {
+ getPurchaseObjs(
+ {
+ type: 'track',
+ properties: { products: [], currency: 'USD' },
+ timestamp: '2023-08-04T12:34:56Z',
+ anonymousId: 'abc',
+ },
+ {} as unknown as BrazeDestinationConfig,
+ );
+ } catch (e: any) {
expect(e.message).toEqual('Invalid Order Completed event: Products array is empty');
}
});
test('message.properties being undefined', () => {
try {
- getPurchaseObjs({
- properties: undefined,
- timestamp: '2023-08-04T12:34:56Z',
- anonymousId: 'abc',
- });
- } catch (e) {
+ getPurchaseObjs(
+ {
+ type: 'track',
+ properties: undefined,
+ timestamp: '2023-08-04T12:34:56Z',
+ anonymousId: 'abc',
+ },
+ {} as unknown as BrazeDestinationConfig,
+ );
+ } catch (e: any) {
expect(e.message).toEqual(
'Invalid Order Completed event: Properties object is missing in the message',
);
@@ -1473,6 +2142,7 @@ describe('getPurchaseObjs', () => {
test('products having extra properties', () => {
const output = getPurchaseObjs(
{
+ type: 'track',
properties: {
products: [
{ product_id: '123', price: 10.99, quantity: 2, random_extra_property_a: 'abc' },
@@ -1493,7 +2163,7 @@ describe('getPurchaseObjs', () => {
},
{
sendPurchaseEventWithExtraProperties: true,
- },
+ } as unknown as BrazeDestinationConfig,
);
expect(output).toEqual([
{
@@ -1549,6 +2219,7 @@ describe('getPurchaseObjs', () => {
test('products having extra properties with sendPurchaseEventWithExtraProperties as false', () => {
const output = getPurchaseObjs(
{
+ type: 'track',
properties: {
products: [
{ product_id: '123', price: 10.99, quantity: 2, random_extra_property_a: 'abc' },
@@ -1569,7 +2240,7 @@ describe('getPurchaseObjs', () => {
},
{
sendPurchaseEventWithExtraProperties: false,
- },
+ } as unknown as BrazeDestinationConfig,
);
expect(output).toEqual([
{
@@ -1617,6 +2288,7 @@ describe('setAliasObject function', () => {
test('should set user_alias from integrationsObj if alias_name and alias_label are defined', () => {
const payload = {};
const result = setAliasObject(payload, {
+ type: 'track',
anonymousId: '12345',
integrations: {
BRAZE: {
@@ -1638,7 +2310,8 @@ describe('setAliasObject function', () => {
// Test when integrationsObj is missing alias_name or alias_label
test('should set user_alias with anonymousId as alias_name and "rudder_id" as alias_label if integrationsObj does not have alias_name or alias_label', () => {
- const message = {
+ const message: RudderBrazeMessage = {
+ type: 'track',
anonymousId: '12345',
};
const payload = {};
@@ -1654,7 +2327,9 @@ describe('setAliasObject function', () => {
// Test when message has no anonymousId and integrationsObj is missing
test('should return payload unchanged if message has no anonymousId and integrationsObj is missing', () => {
- const message = {};
+ const message: RudderBrazeMessage = {
+ type: 'track',
+ };
const payload = {};
const result = setAliasObject(payload, message);
@@ -1664,6 +2339,7 @@ describe('setAliasObject function', () => {
test('should set user_alias from integrationsObj if alias_name and alias_label are defined', () => {
const payload = {};
const result = setAliasObject(payload, {
+ type: 'track',
anonymousId: '12345',
integrations: {
BRAZE: {
@@ -1686,6 +2362,7 @@ describe('setAliasObject function', () => {
test('should set user_alias from integrationsObj if alias_name and alias_label either is not defined', () => {
const payload = {};
const result = setAliasObject(payload, {
+ type: 'track',
anonymousId: '12345',
integrations: {
BRAZE: {
@@ -1708,6 +2385,7 @@ describe('setAliasObject function', () => {
test('should set user_alias from integrationsObj if alias_name and alias_label either is not defined', () => {
const payload = {};
const result = setAliasObject(payload, {
+ type: 'track',
anonymousId: '12345',
userID: 'user123',
integrations: {
@@ -1757,17 +2435,17 @@ describe('handleReservedProperties', () => {
// Input object is null or undefined
it('should return an empty object when input object is null', () => {
- const props = null;
+ const props = null as unknown as Record;
const result = handleReservedProperties(props);
expect(result).toEqual({});
});
// Handles non-object inputs gracefully
it('should return an empty object when a non-object input is provided', () => {
- const props = 'not an object';
+ const props = 'not an object' as unknown as Record;
try {
handleReservedProperties(props);
- } catch (e) {
+ } catch (e: any) {
expect(e.message).toBe('Invalid event properties');
}
});
@@ -1792,27 +2470,27 @@ describe('handleReservedProperties', () => {
describe('combineSubscriptionGroups', () => {
it('should merge external_ids, emails, and phones for the same subscription_group_id and subscription_state', () => {
- const input = [
+ const input: BrazeSubscriptionGroup[] = [
{
subscription_group_id: 'group1',
- subscription_state: 'Subscribed',
+ subscription_state: 'subscribed',
external_ids: ['id1', 'id2'],
emails: ['email1@example.com', 'email2@example.com'],
phones: ['+1234567890', '+0987654321'],
},
{
subscription_group_id: 'group1',
- subscription_state: 'Subscribed',
+ subscription_state: 'subscribed',
external_ids: ['id2', 'id3'],
emails: ['email2@example.com', 'email3@example.com'],
phones: ['+1234567890', '+1122334455'],
},
];
- const expectedOutput = [
+ const expectedOutput: BrazeSubscriptionGroup[] = [
{
subscription_group_id: 'group1',
- subscription_state: 'Subscribed',
+ subscription_state: 'subscribed',
external_ids: ['id1', 'id2', 'id3'],
emails: ['email1@example.com', 'email2@example.com', 'email3@example.com'],
phones: ['+1234567890', '+0987654321', '+1122334455'],
@@ -1824,28 +2502,28 @@ describe('combineSubscriptionGroups', () => {
});
it('should handle groups with missing external_ids, emails, or phones', () => {
- const input = [
+ const input: BrazeSubscriptionGroup[] = [
{
subscription_group_id: 'group1',
- subscription_state: 'Subscribed',
+ subscription_state: 'subscribed',
external_ids: ['id1'],
},
{
subscription_group_id: 'group1',
- subscription_state: 'Subscribed',
+ subscription_state: 'subscribed',
emails: ['email1@example.com'],
},
{
subscription_group_id: 'group1',
- subscription_state: 'Subscribed',
+ subscription_state: 'subscribed',
phones: ['+1234567890'],
},
];
- const expectedOutput = [
+ const expectedOutput: BrazeSubscriptionGroup[] = [
{
subscription_group_id: 'group1',
- subscription_state: 'Subscribed',
+ subscription_state: 'subscribed',
external_ids: ['id1'],
emails: ['email1@example.com'],
phones: ['+1234567890'],
@@ -1857,29 +2535,29 @@ describe('combineSubscriptionGroups', () => {
});
it('should handle multiple unique subscription groups', () => {
- const input = [
+ const input: BrazeSubscriptionGroup[] = [
{
subscription_group_id: 'group1',
- subscription_state: 'Subscribed',
+ subscription_state: 'subscribed',
external_ids: ['id1'],
},
{
subscription_group_id: 'group2',
- subscription_state: 'Unsubscribed',
+ subscription_state: 'unsubscribed',
external_ids: ['id2'],
emails: ['email2@example.com'],
},
];
- const expectedOutput = [
+ const expectedOutput: BrazeSubscriptionGroup[] = [
{
subscription_group_id: 'group1',
- subscription_state: 'Subscribed',
+ subscription_state: 'subscribed',
external_ids: ['id1'],
},
{
subscription_group_id: 'group2',
- subscription_state: 'Unsubscribed',
+ subscription_state: 'unsubscribed',
external_ids: ['id2'],
emails: ['email2@example.com'],
},
@@ -1890,18 +2568,18 @@ describe('combineSubscriptionGroups', () => {
});
it('should not include undefined fields in the output', () => {
- const input = [
+ const input: BrazeSubscriptionGroup[] = [
{
subscription_group_id: 'group1',
- subscription_state: 'Subscribed',
+ subscription_state: 'subscribed',
external_ids: ['id1'],
},
];
- const expectedOutput = [
+ const expectedOutput: BrazeSubscriptionGroup[] = [
{
subscription_group_id: 'group1',
- subscription_state: 'Subscribed',
+ subscription_state: 'subscribed',
external_ids: ['id1'],
},
];
@@ -1912,47 +2590,54 @@ describe('combineSubscriptionGroups', () => {
});
describe('getEndpointFromConfig', () => {
- const testCases = [
+ type TestCase = {
+ name: string;
+ input: BrazeDestination;
+ expected?: string;
+ throws?: boolean;
+ errorMessage?: string;
+ };
+ const testCases: TestCase[] = [
{
name: 'returns correct EU endpoint',
- input: { Config: { dataCenter: 'EU-02' } },
+ input: { Config: { dataCenter: 'EU-02' } } as unknown as BrazeDestination,
expected: 'https://rest.fra-02.braze.eu',
},
{
name: 'returns correct US endpoint',
- input: { Config: { dataCenter: 'US-03' } },
+ input: { Config: { dataCenter: 'US-03' } } as unknown as BrazeDestination,
expected: 'https://rest.iad-03.braze.com',
},
{
name: 'returns correct AU endpoint',
- input: { Config: { dataCenter: 'AU-01' } },
+ input: { Config: { dataCenter: 'AU-01' } } as unknown as BrazeDestination,
expected: 'https://rest.au-01.braze.com',
},
{
name: 'handles lowercase input correctly',
- input: { Config: { dataCenter: 'eu-03' } },
+ input: { Config: { dataCenter: 'eu-03' } } as unknown as BrazeDestination,
expected: 'https://rest.fra-03.braze.eu',
},
{
name: 'handles whitespace in input',
- input: { Config: { dataCenter: ' US-02 ' } },
+ input: { Config: { dataCenter: ' US-02 ' } } as unknown as BrazeDestination,
expected: 'https://rest.iad-02.braze.com',
},
{
name: 'throws error for empty dataCenter',
- input: { Config: {} },
+ input: { Config: {} } as unknown as BrazeDestination,
throws: true,
errorMessage: 'Invalid Data Center: valid values are EU, US, AU',
},
{
name: 'throws error for invalid region',
- input: { Config: { dataCenter: 'INVALID-01' } },
+ input: { Config: { dataCenter: 'INVALID-01' } } as unknown as BrazeDestination,
throws: true,
errorMessage: 'Invalid Data Center: INVALID-01, valid values are EU, US, AU',
},
];
- testCases.forEach(({ name, input, expected, throws, errorMessage }) => {
+ testCases.forEach(({ name, input, expected, throws, errorMessage }: TestCase) => {
test(name, () => {
if (throws) {
expect(() => getEndpointFromConfig(input)).toThrow(errorMessage);
diff --git a/src/v0/destinations/braze/config.js b/src/v0/destinations/braze/config.ts
similarity index 79%
rename from src/v0/destinations/braze/config.js
rename to src/v0/destinations/braze/config.ts
index 24d710a75cc..41385f35e5a 100644
--- a/src/v0/destinations/braze/config.js
+++ b/src/v0/destinations/braze/config.ts
@@ -1,4 +1,5 @@
-const { getMappingConfig } = require('../../util');
+import { getMappingConfig } from '../../util';
+import type { BrazeEndpointDetails } from './types';
const ConfigCategory = {
IDENTIFY: {
@@ -9,36 +10,40 @@ const ConfigCategory = {
},
};
-function getIdentifyEndpoint(baseEndpoint) {
+function getIdentifyEndpoint(baseEndpoint: string): BrazeEndpointDetails {
return {
endpoint: `${baseEndpoint}/users/identify`,
path: 'users/identify',
};
}
-function getTrackEndPoint(baseEndpoint) {
+function getTrackEndPoint(baseEndpoint: string): BrazeEndpointDetails {
return {
endpoint: `${baseEndpoint}/users/track`,
path: 'users/track',
};
}
-function getSubscriptionGroupEndPoint(baseEndpoint) {
+function getSubscriptionGroupEndPoint(baseEndpoint: string): BrazeEndpointDetails {
return {
endpoint: `${baseEndpoint}/v2/subscription/status/set`,
path: 'v2/subscription/status/set',
};
}
-function getAliasMergeEndPoint(baseEndpoint) {
+function getAliasMergeEndPoint(baseEndpoint: string): BrazeEndpointDetails {
return {
endpoint: `${baseEndpoint}/users/merge`,
path: 'users/merge',
};
}
-const mappingConfig = getMappingConfig(ConfigCategory, __dirname);
-
+const mappingConfig = getMappingConfig(ConfigCategory, __dirname) as Record<
+ string,
+ {
+ [key: string]: Record<string, unknown>;
+ }
+>;
const BRAZE_PARTNER_NAME = 'RudderStack';
// max requests per batch
@@ -71,7 +76,7 @@ const BRAZE_NON_BILLABLE_ATTRIBUTES = [
const BRAZE_PURCHASE_STANDARD_PROPERTIES = ['product_id', 'sku', 'price', 'quantity', 'currency'];
-module.exports = {
+export {
ConfigCategory,
mappingConfig,
getIdentifyEndpoint,
diff --git a/src/v0/destinations/braze/deleteUsers.js b/src/v0/destinations/braze/deleteUsers.ts
similarity index 71%
rename from src/v0/destinations/braze/deleteUsers.js
rename to src/v0/destinations/braze/deleteUsers.ts
index a83763e90de..d8cc3763276 100644
--- a/src/v0/destinations/braze/deleteUsers.js
+++ b/src/v0/destinations/braze/deleteUsers.ts
@@ -1,17 +1,18 @@
-const { NetworkError, ConfigurationError } = require('@rudderstack/integrations-lib');
-const { httpPOST } = require('../../../adapters/network');
-const {
- processAxiosResponse,
- getDynamicErrorType,
-} = require('../../../adapters/utils/networkUtils');
-const tags = require('../../util/tags');
-const { isHttpStatusSuccess } = require('../../util');
-const { executeCommonValidations } = require('../../util/regulation-api');
-const { DEL_MAX_BATCH_SIZE } = require('./config');
-const { getUserIdBatches } = require('../../util/deleteUserUtils');
-const { JSON_MIME_TYPE } = require('../../util/constant');
+import { NetworkError, ConfigurationError } from '@rudderstack/integrations-lib';
+import { httpPOST } from '../../../adapters/network';
+import { processAxiosResponse, getDynamicErrorType } from '../../../adapters/utils/networkUtils';
+import tags from '../../util/tags';
+import { isHttpStatusSuccess } from '../../util';
+import { executeCommonValidations } from '../../util/regulation-api';
+import { DEL_MAX_BATCH_SIZE } from './config';
+import { getUserIdBatches } from '../../util/deleteUserUtils';
+import { JSON_MIME_TYPE } from '../../util/constant';
+import type { BrazeDeleteUserEvent, BrazeDestinationConfig } from './types';
-const userDeletionHandler = async (userAttributes, config) => {
+const userDeletionHandler = async (
+ userAttributes: BrazeDeleteUserEvent['userAttributes'],
+ config: BrazeDestinationConfig,
+) => {
if (!config) {
throw new ConfigurationError('Config for deletion not present');
}
@@ -67,11 +68,11 @@ const userDeletionHandler = async (userAttributes, config) => {
return { statusCode: 200, status: 'successful' };
};
-const processDeleteUsers = async (event) => {
+const processDeleteUsers = async (event: BrazeDeleteUserEvent) => {
const { userAttributes, config } = event;
executeCommonValidations(userAttributes);
const resp = await userDeletionHandler(userAttributes, config);
return resp;
};
-module.exports = { processDeleteUsers };
+export { processDeleteUsers };
diff --git a/src/v0/destinations/braze/docs/businesslogic.md b/src/v0/destinations/braze/docs/businesslogic.md
index 7548954b12a..db5d33393a1 100644
--- a/src/v0/destinations/braze/docs/businesslogic.md
+++ b/src/v0/destinations/braze/docs/businesslogic.md
@@ -15,7 +15,7 @@ This document outlines the business logic and mappings used in the Braze destina
1. If identity resolution conditions are met:
- ```javascript
+ ```typescript
const integrationsObj = getIntegrationsObj(message, 'BRAZE');
const isAliasPresent = isDefinedAndNotNull(integrationsObj?.alias);
const brazeExternalID = getDestinationExternalID(message, 'brazeExternalId') || message.userId;
@@ -254,7 +254,7 @@ The Braze destination can generate multiple API calls from a single input event
Input: Identify event meeting specific conditions
Conditions:
- ```javascript
+ ```typescript
const integrationsObj = getIntegrationsObj(message, 'BRAZE');
const isAliasPresent = isDefinedAndNotNull(integrationsObj?.alias);
const brazeExternalID = getDestinationExternalID(message, 'brazeExternalId') || message.userId;
@@ -268,7 +268,7 @@ The Braze destination can generate multiple API calls from a single input event
- API Call 1: POST /users/identify (merge anonymous and identified users)
- API Call 2: POST /users/track (send user attributes)
- Multiplexing: NO (first call is intermediary)
+ - Multiplexing: NO (first call is intermediary)
**Note**: This is not considered true multiplexing as the first call is an intermediary step for identity resolution before the main data delivery. The identify call is only made when either `anonymousId` or a custom alias is present AND either `userId` or `brazeExternalId` is present.
diff --git a/src/v0/destinations/braze/docs/retl.md b/src/v0/destinations/braze/docs/retl.md
index 55da6b1c939..3bebf632032 100644
--- a/src/v0/destinations/braze/docs/retl.md
+++ b/src/v0/destinations/braze/docs/retl.md
@@ -75,7 +75,7 @@ The Braze destination implements special handling for events that come from RETL
- For Identify events from RETL sources, the code includes special logic to override the userId with an externalId from the context if present
- This is implemented in the `adduserIdFromExternalId` function which is only called when `mappedToDestination` is true
-```javascript
+```typescript
// RETL-specific logic for Identify events
case EventType.IDENTIFY: {
category = ConfigCategory.IDENTIFY;
@@ -83,6 +83,7 @@ case EventType.IDENTIFY: {
if (mappedToDestination) {
adduserIdFromExternalId(message);
}
+ break;
}
```
@@ -90,9 +91,13 @@ case EventType.IDENTIFY: {
- When processing user attributes, the code checks for the `MappedToDestinationKey` flag
- If present, it returns the traits as-is without applying the standard mapping logic
-```javascript
+```typescript
// RETL-specific logic for user attributes
-function getUserAttributesObject(message, mappingJson, destination) {
+function getUserAttributesObject(
+ message: RudderBrazeMessage,
+ mappingJson: Record<string, Record<string, unknown>>,
+ destination: BrazeDestination,
+): BrazeUserAttributes {
const traits = getFieldValueFromMessage(message, 'traits');
// return the traits as-is if message is mapped to destination
@@ -116,27 +121,27 @@ function getUserAttributesObject(message, mappingJson, destination) {
### Example RETL Event
-```javascript
+```typescript
// Warehouse record transformed to Braze identify event
-{
- "type": "identify",
- "userId": "user123",
- "traits": {
- "email": "user@example.com",
- "first_name": "John",
- "last_name": "Doe",
- "custom_attribute": "value"
+const retlEvent: RudderBrazeMessage = {
+ type: 'identify',
+ userId: 'user123',
+ traits: {
+ email: 'user@example.com',
+ first_name: 'John',
+ last_name: 'Doe',
+ custom_attribute: 'value',
},
- "context": {
- "mappedToDestination": true,
- "externalId": [
+ context: {
+ mappedToDestination: true,
+ externalId: [
{
- "id": "external_user_123",
- "type": "brazeExternalId"
- }
- ]
- }
-}
+ id: 'external_user_123',
+ type: 'brazeExternalId',
+ },
+ ],
+ },
+};
```
## Rate Limits and Constraints
diff --git a/src/v0/destinations/braze/identityResolutionUtils.test.ts b/src/v0/destinations/braze/identityResolutionUtils.test.ts
index 85899d42c03..f57e76be25f 100644
--- a/src/v0/destinations/braze/identityResolutionUtils.test.ts
+++ b/src/v0/destinations/braze/identityResolutionUtils.test.ts
@@ -5,10 +5,15 @@ import { isHttpStatusSuccess } from '../../util';
import { collectStatsForAliasFailure, getEndpointFromConfig } from './util';
import { getIdentifyEndpoint, IDENTIFY_BRAZE_MAX_REQ_COUNT } from './config';
import * as stats from '../../../util/stats';
-import * as tags from '../../util/tags';
import * as logger from '../../../logger';
import { processBatchedIdentify, processSingleBatch } from './identityResolutionUtils';
-import { Destination } from '../../../types';
+import type {
+ BrazeDestinationConfig,
+ BrazeDestination,
+ BrazeIdentifyCall,
+ BrazeIdentifyRequestBody,
+ BrazeAliasToIdentify,
+} from './types';
// Mock all dependencies
jest.mock('../../../adapters/network');
@@ -48,30 +53,7 @@ const mockedLoggerError = logger.error as jest.MockedFunction;
-// Test data interfaces
-interface BrazeDestinationConfig {
- restApiKey: string;
- dataCenter?: string;
- [key: string]: unknown;
-}
-
-interface AliasToIdentify {
- external_id: string;
- alias_name: string;
- alias_label: string;
-}
-
-interface IdentifyPayload {
- aliases_to_identify: AliasToIdentify[];
- merge_behavior?: string;
-}
-
-interface IdentifyCall {
- identifyPayload: IdentifyPayload;
- destination: Destination;
- metadata: unknown;
-}
-
+// Test-specific response interfaces
interface BrazePartialError {
type?: string;
input_array?: string;
@@ -90,12 +72,20 @@ interface BrazeResponse {
// Test fixtures
const createMockDestination = (
overrides: Partial<BrazeDestinationConfig> = {},
-): Destination => ({
+): BrazeDestination => ({
ID: 'test-destination-id',
Name: 'Test Braze Destination',
Config: {
restApiKey: 'test-api-key',
dataCenter: 'US-03',
+ enableSubscriptionGroupInGroupCall: false,
+ sendPurchaseEventWithExtraProperties: false,
+ enableNestedArrayOperations: false,
+ supportDedup: false,
+ trackAnonymousUser: false,
+ enableIdentifyForAnonymousUser: false,
+ blacklistedEvents: [],
+ whitelistedEvents: [],
...overrides,
},
DestinationDefinition: {
@@ -109,7 +99,9 @@ const createMockDestination = (
WorkspaceID: 'test-workspace-id',
});
-const createMockAliasToIdentify = (overrides: Partial<AliasToIdentify> = {}): AliasToIdentify => ({
+const createMockAliasToIdentify = (
+ overrides: Partial<BrazeAliasToIdentify> = {},
+): BrazeAliasToIdentify => ({
external_id: 'user123',
alias_name: 'anon456',
alias_label: 'rudder_id',
@@ -119,14 +111,15 @@ const createMockAliasToIdentify = (overrides: Partial = {}): Al
const createMockIdentifyCall = (
aliasCount: number = 1,
destinationOverrides: Partial<BrazeDestinationConfig> = {},
-): IdentifyCall => ({
+): BrazeIdentifyCall => ({
identifyPayload: {
aliases_to_identify: Array.from({ length: aliasCount }, (_, i) =>
createMockAliasToIdentify({
- external_id: `user${i + 1}`,
alias_name: `anon${i + 1}`,
+ alias_label: 'rudder_id',
}),
),
+ merge_behavior: 'merge',
},
destination: createMockDestination(destinationOverrides),
metadata: { jobId: 1, userId: 'test-user' },
@@ -832,7 +825,7 @@ describe('identityResolutionUtils', () => {
describe('Integration & Edge Case Tests', () => {
describe('Data Structure Tests', () => {
it('should handle valid IdentifyCall objects with proper structure', async () => {
- const validIdentifyCall: IdentifyCall = {
+ const validIdentifyCall: BrazeIdentifyCall = {
identifyPayload: {
aliases_to_identify: [
{
@@ -856,9 +849,10 @@ describe('identityResolutionUtils', () => {
});
it('should handle empty aliases_to_identify array', async () => {
- const identifyCallWithEmptyAliases: IdentifyCall = {
+ const identifyCallWithEmptyAliases: BrazeIdentifyCall = {
identifyPayload: {
aliases_to_identify: [],
+ merge_behavior: 'merge',
},
destination: createMockDestination(),
metadata: {},
@@ -903,10 +897,11 @@ describe('identityResolutionUtils', () => {
// Missing alias_name and alias_label
},
],
+ merge_behavior: 'merge' as const,
},
destination: createMockDestination(),
metadata: null,
- } as any;
+ };
const mockResponse = createMockBrazeResponse(200);
mockedHandleHttpRequest.mockResolvedValue(mockResponse);
@@ -1080,15 +1075,16 @@ describe('identityResolutionUtils', () => {
describe('Type Safety & Interface Tests', () => {
describe('TypeScript Interface Compliance', () => {
it('should handle AliasToIdentify interface correctly', async () => {
- const aliasToIdentify: AliasToIdentify = {
+ const aliasToIdentify = createMockAliasToIdentify({
external_id: 'user123',
alias_name: 'anon456',
alias_label: 'rudder_id',
- };
+ });
- const identifyCall: IdentifyCall = {
+ const identifyCall: BrazeIdentifyCall = {
identifyPayload: {
aliases_to_identify: [aliasToIdentify],
+ merge_behavior: 'merge',
},
destination: createMockDestination(),
metadata: {},
@@ -1103,12 +1099,12 @@ describe('identityResolutionUtils', () => {
});
it('should handle IdentifyPayload interface with optional merge_behavior', async () => {
- const identifyPayloadWithMergeBehavior: IdentifyPayload = {
+ const identifyPayloadWithMergeBehavior: BrazeIdentifyRequestBody = {
aliases_to_identify: [createMockAliasToIdentify()],
- merge_behavior: 'none',
+ merge_behavior: 'merge',
};
- const identifyCall: IdentifyCall = {
+ const identifyCall: BrazeIdentifyCall = {
identifyPayload: identifyPayloadWithMergeBehavior,
destination: createMockDestination(),
metadata: {},
@@ -1206,9 +1202,10 @@ describe('identityResolutionUtils', () => {
});
it('should handle zero aliases gracefully', async () => {
- const zeroAliasesCall: IdentifyCall = {
+ const zeroAliasesCall: BrazeIdentifyCall = {
identifyPayload: {
aliases_to_identify: [],
+ merge_behavior: 'merge',
},
destination: createMockDestination(),
metadata: {},
@@ -1283,9 +1280,10 @@ describe('identityResolutionUtils', () => {
});
it('should handle null/undefined metadata', async () => {
- const identifyCallWithNullMetadata: IdentifyCall = {
+ const identifyCallWithNullMetadata: BrazeIdentifyCall = {
identifyPayload: {
aliases_to_identify: [createMockAliasToIdentify()],
+ merge_behavior: 'merge',
},
destination: createMockDestination(),
metadata: null,
diff --git a/src/v0/destinations/braze/identityResolutionUtils.ts b/src/v0/destinations/braze/identityResolutionUtils.ts
index b0af9fc418e..04f19d8f8cc 100644
--- a/src/v0/destinations/braze/identityResolutionUtils.ts
+++ b/src/v0/destinations/braze/identityResolutionUtils.ts
@@ -8,32 +8,7 @@ import { getIdentifyEndpoint, IDENTIFY_BRAZE_MAX_REQ_COUNT } from './config';
import * as stats from '../../../util/stats';
import * as tags from '../../util/tags';
import * as logger from '../../../logger';
-import { Destination } from '../../../types';
-
-interface AliasToIdentify {
- external_id: string;
- alias_name: string;
- alias_label: string;
-}
-
-interface IdentifyPayload {
- aliases_to_identify: AliasToIdentify[];
- merge_behavior?: string;
-}
-
-// -------------------------------
-
-interface BrazeDestinationConfig {
- restApiKey: string;
- dataCenter?: string;
- [key: string]: unknown;
-}
-
-interface IdentifyCall {
- identifyPayload: IdentifyPayload;
- destination: Destination;
- metadata: unknown;
-}
+import type { BrazeIdentifyCall } from './types';
interface BrazePartialError {
type?: string;
@@ -47,6 +22,7 @@ interface BrazeResponse {
message?: string;
errors?: BrazePartialError[];
+ users?: Record<string, unknown>[];
+ aliases_processed?: number;
};
}
@@ -63,7 +39,7 @@ interface BatchIdentifyResult {
* @returns Promise that resolves to BatchIdentifyResult
*/
async function processSingleBatch(
- identifyCallsChunk: IdentifyCall[],
+ identifyCallsChunk: BrazeIdentifyCall[],
destinationId: string,
): Promise<BatchIdentifyResult> {
const { destination } = identifyCallsChunk[0];
@@ -151,7 +127,7 @@ async function processSingleBatch(
* @returns Promise that resolves when all batches are processed
*/
async function processBatchedIdentify(
- identifyCallsArray: IdentifyCall[],
+ identifyCallsArray: BrazeIdentifyCall[],
destinationId: string,
): Promise<void> {
if (!identifyCallsArray || identifyCallsArray.length === 0) {
diff --git a/src/v0/destinations/braze/networkHandler.js b/src/v0/destinations/braze/networkHandler.ts
similarity index 55%
rename from src/v0/destinations/braze/networkHandler.js
rename to src/v0/destinations/braze/networkHandler.ts
index b1363419b31..ad44edf3258 100644
--- a/src/v0/destinations/braze/networkHandler.js
+++ b/src/v0/destinations/braze/networkHandler.ts
@@ -1,17 +1,15 @@
/* eslint-disable no-unused-vars */
-const { NetworkError } = require('@rudderstack/integrations-lib');
-const { isHttpStatusSuccess } = require('../../util/index');
-const { proxyRequest, prepareProxyRequest } = require('../../../adapters/network');
-const {
- getDynamicErrorType,
- processAxiosResponse,
-} = require('../../../adapters/utils/networkUtils');
-const { DESTINATION } = require('./config');
-const tags = require('../../util/tags');
-const stats = require('../../../util/stats');
+import { NetworkError } from '@rudderstack/integrations-lib';
+import { isHttpStatusSuccess } from '../../util/index';
+import { proxyRequest, prepareProxyRequest } from '../../../adapters/network';
+import { getDynamicErrorType, processAxiosResponse } from '../../../adapters/utils/networkUtils';
+import { DESTINATION } from './config';
+import type { BrazeResponseHandlerParams } from './types';
+import tags from '../../util/tags';
+import stats from '../../../util/stats';
// eslint-disable-next-line @typescript-eslint/no-unused-vars
-const responseHandler = (responseParams) => {
+const responseHandler = (responseParams: BrazeResponseHandlerParams) => {
const { destinationResponse } = responseParams;
const message = `Request for ${DESTINATION} Processed Successfully`;
const { response, status } = destinationResponse;
@@ -28,22 +26,12 @@ const responseHandler = (responseParams) => {
}
// Partial errors
- if (
- !!response &&
- response.message === 'success' &&
- response.errors &&
- response.errors.length > 0
- ) {
+ if (response?.message === 'success' && response?.errors && response.errors.length > 0) {
stats.increment('braze_partial_failure');
}
// application level errors
- if (
- !!response &&
- response.message !== 'success' &&
- response.errors &&
- response.errors.length > 0
- ) {
+ if (response?.message !== 'success' && response?.errors && response.errors.length > 0) {
throw new NetworkError(
`Request failed for ${DESTINATION} with status: ${status}`,
status,
@@ -60,13 +48,16 @@ const responseHandler = (responseParams) => {
};
};
-function networkHandler() {
+function networkHandler(this: {
+ responseHandler: typeof responseHandler;
+ proxy: typeof proxyRequest;
+ prepareProxy: typeof prepareProxyRequest;
+ processAxiosResponse: typeof processAxiosResponse;
+}) {
this.responseHandler = responseHandler;
this.proxy = proxyRequest;
this.prepareProxy = prepareProxyRequest;
this.processAxiosResponse = processAxiosResponse;
}
-module.exports = {
- networkHandler,
-};
+export { networkHandler };
diff --git a/src/v0/destinations/braze/transform.js b/src/v0/destinations/braze/transform.ts
similarity index 79%
rename from src/v0/destinations/braze/transform.js
rename to src/v0/destinations/braze/transform.ts
index 9a0442c347b..23634ea73e1 100644
--- a/src/v0/destinations/braze/transform.js
+++ b/src/v0/destinations/braze/transform.ts
@@ -1,9 +1,9 @@
/* eslint-disable no-nested-ternary,no-param-reassign */
-const lodash = require('lodash');
-const get = require('get-value');
-const { InstrumentationError, NetworkError } = require('@rudderstack/integrations-lib');
-const { FilteredEventsError } = require('../../util/errorTypes');
-const {
+import lodash from 'lodash';
+import get from 'get-value';
+import { InstrumentationError, NetworkError } from '@rudderstack/integrations-lib';
+import { FilteredEventsError } from '../../util/errorTypes';
+import {
BrazeDedupUtility,
CustomAttributeOperationUtil,
processDeduplication,
@@ -16,11 +16,25 @@ const {
collectStatsForAliasFailure,
collectStatsForAliasMissConfigurations,
handleReservedProperties,
-} = require('./util');
-
-const tags = require('../../util/tags');
-const { EventType, MappedToDestinationKey } = require('../../../constants');
-const {
+ getEndpointFromConfig,
+ formatGender,
+} from './util';
+import type {
+ BrazeDestination,
+ BrazeRouterRequest,
+ BrazeProcessParams,
+ BrazeUserAttributes,
+ BrazeIdentifyRequestBody,
+ BrazeEndpointDetails,
+ BrazeIdentifyCall,
+ RudderBrazeMessage,
+ BrazeUser,
+ BrazeMergeUpdate,
+} from './types';
+
+import tags from '../../util/tags';
+import { EventType, MappedToDestinationKey } from '../../../constants';
+import {
adduserIdFromExternalId,
defaultRequestConfig,
getFieldValueFromMessage,
@@ -32,8 +46,8 @@ const {
isNewStatusCodesAccepted,
getDestinationExternalID,
getIntegrationsObj,
-} = require('../../util');
-const {
+} from '../../util';
+import {
ConfigCategory,
mappingConfig,
getIdentifyEndpoint,
@@ -42,16 +56,22 @@ const {
getAliasMergeEndPoint,
BRAZE_PARTNER_NAME,
CustomAttributeOperationTypes,
-} = require('./config');
-
-const logger = require('../../../logger');
-const { getEndpointFromConfig, formatGender } = require('./util');
-const { handleHttpRequest } = require('../../../adapters/network');
-const { getDynamicErrorType } = require('../../../adapters/utils/networkUtils');
-const { processBatchedIdentify } = require('./identityResolutionUtils');
-const { JSON_MIME_TYPE } = require('../../util/constant');
-
-function buildResponse(message, destination, properties, endpointDetails) {
+ DESTINATION,
+} from './config';
+
+import logger from '../../../logger';
+import { handleHttpRequest } from '../../../adapters/network';
+import { getDynamicErrorType } from '../../../adapters/utils/networkUtils';
+import { processBatchedIdentify } from './identityResolutionUtils';
+import { JSON_MIME_TYPE } from '../../util/constant';
+import { ProcessorTransformationOutput } from '../../../types';
+
+function buildResponse(
+ message: RudderBrazeMessage,
+ destination: BrazeDestination,
+ properties: unknown,
+ endpointDetails: BrazeEndpointDetails,
+) {
const response = defaultRequestConfig();
response.endpoint = endpointDetails.endpoint;
response.endpointPath = endpointDetails.path;
@@ -68,18 +88,18 @@ function buildResponse(message, destination, properties, endpointDetails) {
};
}
-function getIdentifyPayload(message) {
- let payload = {};
+function getIdentifyPayload(message: RudderBrazeMessage): BrazeIdentifyRequestBody {
+ let payload: Partial = {};
payload = setAliasObject(payload, message);
payload = setExternalId(payload, message);
return { aliases_to_identify: [payload], merge_behavior: 'merge' };
}
function populateCustomAttributesWithOperation(
- traits,
- data,
- mergeObjectsUpdateOperation,
- enableNestedArrayOperations,
+ traits: Record<string, Record<string, unknown>>,
+ data: Record<string, Record<string, unknown>>,
+ mergeObjectsUpdateOperation: unknown,
+ enableNestedArrayOperations: unknown,
) {
try {
// add,update,remove on json attributes
@@ -106,15 +126,19 @@ function populateCustomAttributesWithOperation(
}
});
}
- } catch (exp) {
+ } catch (exp: any) {
logger.info('Failure occurred during custom attributes operations', exp);
}
}
// Ref: https://www.braze.com/docs/api/objects_filters/user_attributes_object/
-function getUserAttributesObject(message, mappingJson, destination) {
+function getUserAttributesObject(
+ message: RudderBrazeMessage,
+ mappingJson: Record<string, Record<string, unknown>>,
+ destination: BrazeDestination,
+): BrazeUserAttributes {
// blank output object
- const data = {};
+ const data: Record<string, Record<string, unknown>> = {};
// get traits from message
const traits = getFieldValueFromMessage(message, 'traits');
@@ -175,7 +199,7 @@ function getUserAttributesObject(message, mappingJson, destination) {
traits,
data,
message.properties?.mergeObjectsUpdateOperation,
- destination?.Config.enableNestedArrayOperations,
+ destination.Config.enableNestedArrayOperations,
);
return data;
@@ -189,7 +213,13 @@ function getUserAttributesObject(message, mappingJson, destination) {
* @param {*} message
* @param {*} destination
*/
-async function processIdentify({ message, destination, metadata, identifyCallsArray }) {
+async function processIdentify(params: {
+ message: RudderBrazeMessage;
+ destination: BrazeDestination;
+ metadata?: unknown;
+ identifyCallsArray?: unknown[];
+}) {
+ const { message, destination, metadata, identifyCallsArray } = params;
const identifyPayload = getIdentifyPayload(message);
if (Array.isArray(identifyCallsArray)) {
identifyCallsArray.push({
@@ -236,16 +266,16 @@ async function processIdentify({ message, destination, metadata, identifyCallsAr
}
function processTrackWithUserAttributes(
- message,
- destination,
- mappingJson,
- processParams,
- reqMetadata,
+ message: RudderBrazeMessage,
+ destination: BrazeDestination,
+ mappingJson: Record<string, Record<string, unknown>>,
+ processParams: BrazeProcessParams,
+ reqMetadata: Record<string, unknown>,
) {
- let payload = getUserAttributesObject(message, mappingJson);
+ let payload = getUserAttributesObject(message, mappingJson, destination);
if (payload && Object.keys(payload).length > 0) {
payload = setExternalIdOrAliasObject(payload, message);
- const requestJson = { attributes: [payload] };
+ const requestJson: Record<string, unknown> = { attributes: [payload] };
if (destination.Config.supportDedup) {
const dedupedAttributePayload = processDeduplication(
processParams.userStore,
@@ -275,20 +305,29 @@ function processTrackWithUserAttributes(
throw new InstrumentationError('No attributes found to update the user profile');
}
-function addMandatoryEventProperties(payload, message) {
- payload.name = message.event;
- payload.time = message.timestamp;
+function addMandatoryEventProperties(
+ payload: Record<string, unknown>,
+ message: RudderBrazeMessage,
+) {
+ payload.name = message.event!;
+ payload.time = message.timestamp!;
return payload;
}
-function processTrackEvent(messageType, message, destination, mappingJson, processParams) {
+function processTrackEvent(
+ messageType: string,
+ message: RudderBrazeMessage,
+ destination: BrazeDestination,
+ mappingJson: Record<string, Record<string, unknown>>,
+ processParams: BrazeProcessParams,
+) {
const eventName = message.event;
if (!message.properties) {
message.properties = {};
}
let { properties } = message;
- const requestJson = {
+ const requestJson: Record<string, unknown> = {
partner: BRAZE_PARTNER_NAME,
};
@@ -329,7 +368,7 @@ function processTrackEvent(messageType, message, destination, mappingJson, proce
);
}
properties = handleReservedProperties(properties);
- let payload = {};
+ let payload: Record<string, unknown> = {};
// mandatory fields
payload = addMandatoryEventProperties(payload, message);
@@ -354,7 +393,7 @@ function processTrackEvent(messageType, message, destination, mappingJson, proce
//
// Ex: If the groupId is 1234, we'll add a attribute to the user object with the
// key `ab_rudder_group_1234` with the value `true`
-function processGroup(message, destination) {
+function processGroup(message: RudderBrazeMessage, destination: BrazeDestination) {
const groupId = getFieldValueFromMessage(message, 'groupId');
if (!groupId) {
throw new InstrumentationError('Invalid groupId');
@@ -365,7 +404,7 @@ function processGroup(message, destination) {
'Message should have traits with subscriptionState, email or phone',
);
}
- const subscriptionGroup = {
+ const subscriptionGroup: Record<string, unknown> = {
subscription_group_id: groupId,
};
if (
@@ -405,7 +444,7 @@ function processGroup(message, destination) {
},
};
}
- const groupAttribute = {};
+ const groupAttribute: Record<string, unknown> = {};
groupAttribute[`ab_rudder_group_${groupId}`] = true;
setExternalId(groupAttribute, message);
return buildResponse(
@@ -419,9 +458,8 @@ function processGroup(message, destination) {
);
}
-function processAlias(message, destination) {
- const userId = message?.userId;
- const previousId = message?.previousId;
+function processAlias(message: RudderBrazeMessage, destination: BrazeDestination) {
+ const { userId, previousId } = message;
if (!userId) {
throw new InstrumentationError('[BRAZE]: userId is required for alias call');
@@ -431,7 +469,7 @@ function processAlias(message, destination) {
throw new InstrumentationError('[BRAZE]: previousId is required for alias call');
}
- const mergeUpdates = [
+ const mergeUpdates: BrazeMergeUpdate[] = [
{
identifier_to_merge: {
external_id: previousId,
@@ -454,7 +492,14 @@ function processAlias(message, destination) {
);
}
-async function process(event, processParams = { userStore: new Map() }, reqMetadata = {}) {
+async function process(
+ event: BrazeRouterRequest,
+ processParams: BrazeProcessParams = {
+ userStore: new Map(),
+ failedLookupIdentifiers: new Set(),
+ },
+ reqMetadata: Record<string, unknown> = {},
+): Promise<ProcessorTransformationOutput | ProcessorTransformationOutput[]> {
let response;
const { message, destination } = event;
const messageType = message.type.toLowerCase();
@@ -498,7 +543,7 @@ async function process(event, processParams = { userStore: new Map() }, reqMetad
adduserIdFromExternalId(message);
}
- const integrationsObj = getIntegrationsObj(message, 'BRAZE');
+ const integrationsObj = getIntegrationsObj(message, DESTINATION.toUpperCase() as any);
const isAliasPresent = isDefinedAndNotNull(integrationsObj?.alias);
const brazeExternalID =
@@ -534,15 +579,18 @@ async function process(event, processParams = { userStore: new Map() }, reqMetad
return response;
}
-const processRouterDest = async (inputs, reqMetadata) => {
- const userStore = new Map();
- let failedLookupIdentifiers = new Set();
+const processRouterDest = async (
+ inputs: BrazeRouterRequest[],
+ reqMetadata: Record<string, unknown>,
+) => {
+ const userStore = new Map<string, BrazeUser>();
+ let failedLookupIdentifiers = new Set<string>();
const { destination } = inputs[0];
if (destination.Config.supportDedup) {
- let lookupResult;
+ let lookupResult: { users: BrazeUser[]; failedIdentifiers: Set<string> } | undefined;
try {
lookupResult = await BrazeDedupUtility.doLookup(inputs);
- } catch (error) {
+ } catch (error: any) {
logger.error('Error while fetching user store', error);
}
@@ -557,7 +605,7 @@ const processRouterDest = async (inputs, reqMetadata) => {
(input) => input.message.userId || input.message.anonymousId,
);
- const identifyCallsArray = [];
+ const identifyCallsArray: BrazeIdentifyCall[] = [];
// process each group of events for userId or anonymousId
// if deduplication is enabled process each group of events for a user (userId or anonymousId)
@@ -584,4 +632,4 @@ const processRouterDest = async (inputs, reqMetadata) => {
return processBatch(allTransfomredEvents);
};
-module.exports = { process, processRouterDest };
+export { process, processRouterDest };
diff --git a/src/v0/destinations/braze/types.ts b/src/v0/destinations/braze/types.ts
new file mode 100644
index 00000000000..703e4e579b6
--- /dev/null
+++ b/src/v0/destinations/braze/types.ts
@@ -0,0 +1,300 @@
+import {
+ Destination,
+ RouterTransformationRequestData,
+ RudderMessage,
+ Metadata,
+} from '../../../types';
+import {
+ BatchedRequest,
+ MultiBatchRequestOutput,
+ ProcessorTransformationOutput,
+} from '../../../types/destinationTransformation';
+
+// Braze User Alias Object
+interface BrazeUserAlias {
+ alias_name: string;
+ alias_label: string;
+}
+
+// Braze User Attributes Object
+// Ref: https://www.braze.com/docs/api/objects_filters/user_attributes_object/
+export interface BrazeUserAttributes {
+ external_id?: string;
+ user_alias?: BrazeUserAlias;
+ braze_id?: string;
+ _update_existing_only?: boolean;
+
+ // Standard attributes
+ first_name?: string | null;
+ last_name?: string | null;
+ email?: string | null;
+ dob?: string | null;
+ country?: string | null;
+ home_city?: string | null;
+ language?: string | null;
+ phone?: string | null;
+ email_subscribe?: string | null;
+ push_subscribe?: string | null;
+ image_url?: string | null;
+ gender?: 'M' | 'F' | 'O' | 'N' | 'P' | null;
+
+ custom_attributes?: Record<string, unknown>;
+
+ // Custom attributes
+ [key: string]: unknown;
+}
+// Braze Event Object (Complete API specification)
+// Ref: https://www.braze.com/docs/api/objects_filters/event_object/
+export interface BrazeEvent {
+ // User identifiers - at least one is required per Braze API
+ external_id?: string;
+ user_alias?: BrazeUserAlias;
+ braze_id?: string;
+ email?: string;
+ phone?: string;
+
+ // Optional app identifier
+ app_id?: string;
+
+ // Required fields
+ name: string; // Event name (required)
+ time: string; // ISO 8601 datetime or 'yyyy-MM-dd'T'HH:mm:ss:SSSZ' format (required)
+
+ // Optional event properties
+ properties?: Record<string, unknown>;
+
+ // Control flags
+ // When using "user_alias", "Update Only" mode is always true
+ _update_existing_only?: boolean;
+}
+
+// Braze Purchase Object (Complete API specification)
+// Ref: https://www.braze.com/docs/api/objects_filters/purchase_object/
+// Revenue from a purchase object is calculated as the product of quantity and price
+export interface BrazePurchase {
+ // User identifiers - at least one is required per Braze API
+ external_id?: string;
+ user_alias?: BrazeUserAlias;
+ braze_id?: string;
+ email?: string;
+ phone?: string;
+
+ // Optional app identifier
+ // Ref: https://www.braze.com/docs/api/identifier_types/#app-identifier
+ app_id?: string;
+
+ // Required purchase fields
+ // Identifier for the purchase (e.g., Product Name or Product Category)
+ product_id: string;
+
+ // ISO 4217 Alphabetic Currency Code (e.g., USD, EUR, JPY)
+ currency: string;
+
+ // Value in the base currency unit (e.g., Dollars for USD, Yen for JPY)
+ price: number;
+
+ // Time of purchase in ISO 8601 format
+ time: string;
+
+ // Optional: Quantity purchased (defaults to 1, must be <= 100)
+ // Note: Braze treats a quantity X as X separate purchases with quantity 1
+ quantity?: number;
+
+ // Optional purchase properties for additional metadata
+ properties?: Record<string, unknown>;
+
+ // Control flags
+ // Setting this flag to true puts the API in "Update Only" mode
+ // When using "user_alias", "Update Only" mode is always true
+ _update_existing_only?: boolean;
+}
+
+// Braze Track Request Body
+export interface BrazeTrackRequestBody {
+ partner?: string;
+ attributes?: BrazeUserAttributes[];
+ events?: BrazeEvent[];
+ purchases?: BrazePurchase[];
+}
+
+/**
+ * Alias to identify for user merging
+ * NOTE: At least ONE of external_id, user_alias, or (alias_name + alias_label) is required
+ */
+export interface BrazeAliasToIdentify {
+ external_id?: string;
+ user_alias?: BrazeUserAlias;
+ alias_name?: string;
+ alias_label?: string;
+}
+// Braze Identify Request Body
+export interface BrazeIdentifyRequestBody {
+ aliases_to_identify: Array<BrazeAliasToIdentify>;
+ merge_behavior: 'merge';
+}
+
+// Braze Subscription Group
+export interface BrazeSubscriptionGroup {
+ subscription_group_id: string;
+ subscription_state: 'subscribed' | 'unsubscribed';
+ external_ids?: string[];
+ emails?: string[];
+ phones?: string[];
+}
+
+export interface BrazeResponseHandlerParams {
+ destinationResponse: {
+ response?: {
+ message?: string;
+ errors?: unknown[];
+ };
+ status: number;
+ };
+}
+
+export interface BrazeUser extends BrazeUserAttributes {
+ user_aliases?: BrazeUserAlias[];
+}
+
+// Braze /users/export/ids API Response
+export interface BrazeUserExportResponse {
+ users: BrazeUser[];
+ message: string;
+ invalid_user_ids?: string[];
+}
+
+export interface BrazeDestinationConfig {
+ restApiKey: string;
+ dataCenter: string;
+ appKey?: string;
+ enableSubscriptionGroupInGroupCall?: boolean;
+ sendPurchaseEventWithExtraProperties?: boolean;
+ enableNestedArrayOperations?: boolean;
+ supportDedup?: boolean;
+ trackAnonymousUser?: boolean;
+ enableIdentifyForAnonymousUser?: boolean;
+ blacklistedEvents?: string[];
+ whitelistedEvents?: string[];
+}
+
+// Product object structure for e-commerce events
+interface BrazeProduct {
+ product_id?: string;
+ sku?: string;
+ price?: number;
+ quantity?: number;
+ currency?: string;
+ [key: string]: unknown; // Allow additional properties
+}
+
+export interface RudderBrazeMessage extends RudderMessage {
+ properties?: {
+ mergeObjectsUpdateOperation?: boolean;
+ products?: BrazeProduct[]; // Array of products for e-commerce events
+ currency?: string; // Currency at the order level
+ [key: string]: unknown; // Allow additional properties
+ };
+ traits?: {
+ phone?: string;
+ email?: string;
+ subscriptionState?: string;
+ [key: string]: unknown; // Allow additional traits
+ };
+ previousId?: string;
+}
+
+export type BrazeDestination = Destination<BrazeDestinationConfig>;
+export type BrazeRouterRequest = RouterTransformationRequestData<
+ RudderBrazeMessage,
+ BrazeDestination
+>;
+
+// Process params for router transformation
+export interface BrazeProcessParams {
+ userStore: Map<string, BrazeUser>;
+ identifyCallsArray?: BrazeIdentifyCall[];
+ failedLookupIdentifiers: Set<string>;
+}
+
+// Identity resolution types
+export interface BrazeIdentifyCall {
+ identifyPayload: BrazeIdentifyRequestBody;
+ destination: BrazeDestination;
+ metadata: unknown;
+}
+
+// Endpoint response type
+export interface BrazeEndpointDetails {
+ endpoint: string;
+ path: string;
+}
+
+// Braze Subscription Group request body structure
+export interface BrazeSubscriptionBatchPayload {
+ subscription_groups?: unknown[];
+}
+
+// Braze Merge Update Object
+export interface BrazeMergeUpdate {
+ identifier_to_merge: {
+ external_id?: string;
+ };
+ identifier_to_keep: {
+ external_id?: string;
+ };
+}
+
+// Braze Merge Users request body structure
+export interface BrazeMergeBatchPayload {
+ merge_updates?: BrazeMergeUpdate[];
+}
+
+// Union of all possible Braze batch payload types
+export type BrazeBatchPayload =
+ | BrazeTrackRequestBody
+ | BrazeSubscriptionBatchPayload
+ | BrazeMergeBatchPayload;
+
+// Headers type for Braze API requests
+export type BrazeBatchHeaders = {
+ 'Content-Type': string;
+ Accept: string;
+ Authorization: string;
+};
+
+type BrazeBatchParams = Record<string, unknown>;
+
+export type BrazeBatchRequest = BatchedRequest<
+ BrazeBatchPayload,
+ BrazeBatchHeaders,
+ BrazeBatchParams
+>;
+
+export type BrazeTransformedEvent = {
+ statusCode: number;
+ batchedRequest?: ProcessorTransformationOutput;
+ metadata?: Partial<Metadata>[];
+ destination: BrazeDestination;
+ error?: string;
+ statTags?: Record<string, unknown>;
+ authErrorCategory?: string;
+};
+
+export type BrazeBatchResponse =
+ | MultiBatchRequestOutput<
+ BrazeBatchPayload,
+ BrazeBatchHeaders,
+ BrazeBatchParams,
+ BrazeDestination
+ >
+ | BrazeTransformedEvent;
+
+// Delete user types
+export interface BrazeDeleteUserEvent {
+ userAttributes: Array<{
+ userId?: string;
+ email?: string;
+ phone?: string;
+ }>;
+ config: BrazeDestinationConfig;
+}
diff --git a/src/v0/destinations/braze/util.js b/src/v0/destinations/braze/util.ts
similarity index 60%
rename from src/v0/destinations/braze/util.js
rename to src/v0/destinations/braze/util.ts
index 1e384fec349..91c9875aa9f 100644
--- a/src/v0/destinations/braze/util.js
+++ b/src/v0/destinations/braze/util.ts
@@ -1,11 +1,10 @@
/* eslint-disable no-param-reassign, @typescript-eslint/naming-convention */
-const _ = require('lodash');
-const get = require('get-value');
-const { InstrumentationError, isDefined } = require('@rudderstack/integrations-lib');
-const logger = require('../../../logger');
-const stats = require('../../../util/stats');
-const { handleHttpRequest } = require('../../../adapters/network');
-const {
+import _ from 'lodash';
+import get from 'get-value';
+import { InstrumentationError, isDefined } from '@rudderstack/integrations-lib';
+import stats from '../../../util/stats';
+import { handleHttpRequest } from '../../../adapters/network';
+import {
getDestinationExternalID,
getFieldValueFromMessage,
removeUndefinedAndNullValues,
@@ -13,8 +12,11 @@ const {
isDefinedAndNotNullAndNotEmpty,
defaultRequestConfig,
isHttpStatusSuccess,
-} = require('../../util');
-const {
+ isObject,
+ removeUndefinedValues,
+ getIntegrationsObj,
+} from '../../util';
+import {
BRAZE_NON_BILLABLE_ATTRIBUTES,
TRACK_BRAZE_MAX_EXTERNAL_ID_COUNT,
CustomAttributeOperationTypes,
@@ -25,12 +27,37 @@ const {
ALIAS_BRAZE_MAX_REQ_COUNT,
TRACK_BRAZE_MAX_REQ_COUNT,
BRAZE_PURCHASE_STANDARD_PROPERTIES,
-} = require('./config');
-const { JSON_MIME_TYPE, HTTP_STATUS_CODES } = require('../../util/constant');
-const { isObject } = require('../../util');
-const { removeUndefinedValues, getIntegrationsObj } = require('../../util');
+ DESTINATION,
+} from './config';
+import { JSON_MIME_TYPE, HTTP_STATUS_CODES } from '../../util/constant';
+import {
+ BrazeDestination,
+ BrazeRouterRequest,
+ BrazeBatchHeaders,
+ BrazeTransformedEvent,
+ BrazeBatchResponse,
+ BrazeBatchRequest,
+ BrazeSubscriptionGroup,
+ BrazeAliasToIdentify,
+ BrazeUserExportResponse,
+ BrazeUser,
+ BrazeUserAttributes,
+ BrazeEvent,
+ BrazePurchase,
+ BrazeDestinationConfig,
+ RudderBrazeMessage,
+ BrazeMergeUpdate,
+} from './types';
+import type { Metadata } from '../../../types';
+
+type TrackChunk = {
+ attributes: BrazeUserAttributes[];
+ events: BrazeEvent[];
+ purchases: BrazePurchase[];
+ externalIds: Set<string>;
+};
-const formatGender = (gender) => {
+const formatGender = (gender: unknown) => {
if (typeof gender !== 'string') {
return null;
}
@@ -53,7 +80,7 @@ const formatGender = (gender) => {
return null;
};
-const getEndpointFromConfig = (destination) => {
+const getEndpointFromConfig = (destination: BrazeDestination) => {
if (!destination.Config?.dataCenter || typeof destination.Config.dataCenter !== 'string') {
throw new InstrumentationError('Invalid Data Center: valid values are EU, US, AU');
}
@@ -79,8 +106,8 @@ const getEndpointFromConfig = (destination) => {
};
// Merges external_ids, emails, and phones for entries with the same subscription_group_id and subscription_state
-const combineSubscriptionGroups = (subscriptionGroups) => {
- const uniqueGroups = {};
+const combineSubscriptionGroups = (subscriptionGroups: BrazeSubscriptionGroup[]) => {
+ const uniqueGroups: Record<string, BrazeSubscriptionGroup> = {};
subscriptionGroups.forEach((group) => {
const key = `${group.subscription_group_id}-${group.subscription_state}`;
@@ -92,14 +119,15 @@ const combineSubscriptionGroups = (subscriptionGroups) => {
phones: [...(group.phones || [])],
};
} else {
- uniqueGroups[key].external_ids.push(...(group.external_ids || []));
- uniqueGroups[key].emails.push(...(group.emails || []));
- uniqueGroups[key].phones.push(...(group.phones || []));
+ const ug = uniqueGroups[key];
+ ug.external_ids?.push(...(group.external_ids || []));
+ ug.emails?.push(...(group.emails || []));
+ ug.phones?.push(...(group.phones || []));
}
});
return Object.values(uniqueGroups).map((group) => {
- const result = {
+ const result: Record<string, unknown> = {
subscription_group_id: group.subscription_group_id,
subscription_state: group.subscription_state,
};
@@ -117,25 +145,27 @@ const combineSubscriptionGroups = (subscriptionGroups) => {
};
const CustomAttributeOperationUtil = {
- customAttributeUpdateOperation(key, data, traits, mergeObjectsUpdateOperation) {
+ customAttributeUpdateOperation(
+ key: string,
+ data: Record<string, unknown>,
+ traits: Record<string, any>,
+ mergeObjectsUpdateOperation: unknown,
+ ) {
data[key] = {};
- const opsResultArray = [];
- for (let i = 0; i < traits[key][CustomAttributeOperationTypes.UPDATE].length; i += 1) {
- const myObj = {
- $identifier_key: traits[key][CustomAttributeOperationTypes.UPDATE][i].identifier,
- $identifier_value:
- traits[key][CustomAttributeOperationTypes.UPDATE][i][
- traits[key][CustomAttributeOperationTypes.UPDATE][i].identifier
- ],
+ const updateArray = traits[key]?.[CustomAttributeOperationTypes.UPDATE];
+ const opsResultArray: unknown[] = [];
+ for (const arrayItem of updateArray) {
+ const item = arrayItem;
+ const myObj: Record<string, unknown | Record<string, unknown>> = {
+ $identifier_key: item.identifier,
+ $identifier_value: item[item.identifier],
};
- delete traits[key][CustomAttributeOperationTypes.UPDATE][i][
- traits[key][CustomAttributeOperationTypes.UPDATE][i].identifier
- ];
- delete traits[key][CustomAttributeOperationTypes.UPDATE][i].identifier;
+ delete item[item.identifier];
+ delete item.identifier;
myObj.$new_object = {};
- Object.keys(traits[key][CustomAttributeOperationTypes.UPDATE][i]).forEach((subKey) => {
- myObj.$new_object[subKey] = traits[key][CustomAttributeOperationTypes.UPDATE][i][subKey];
+ Object.keys(item).forEach((subKey) => {
+ myObj.$new_object[subKey] = item[subKey];
});
opsResultArray.push(myObj);
}
@@ -143,34 +173,42 @@ const CustomAttributeOperationUtil = {
data._merge_objects = isDefinedAndNotNull(mergeObjectsUpdateOperation)
? mergeObjectsUpdateOperation
: false;
- data[key][`$${CustomAttributeOperationTypes.UPDATE}`] = opsResultArray;
+ (data[key] as Record<string, unknown>)[`$${CustomAttributeOperationTypes.UPDATE}`] =
+ opsResultArray;
},
- customAttributeRemoveOperation(key, data, traits) {
- const opsResultArray = [];
- for (let i = 0; i < traits[key][CustomAttributeOperationTypes.REMOVE].length; i += 1) {
- const myObj = {
- $identifier_key: traits[key][CustomAttributeOperationTypes.REMOVE][i].identifier,
- $identifier_value:
- traits[key][CustomAttributeOperationTypes.REMOVE][i][
- traits[key][CustomAttributeOperationTypes.REMOVE][i].identifier
- ],
+ customAttributeRemoveOperation(
+ key: string,
+ data: Record<string, Record<string, unknown>>,
+ traits: Record<string, any>,
+ ) {
+ const removeArray = traits[key]?.[CustomAttributeOperationTypes.REMOVE];
+ const opsResultArray: unknown[] = [];
+ for (const arrayItem of removeArray) {
+ const item = arrayItem;
+ const myObj: Record<string, unknown> = {
+ $identifier_key: item.identifier,
+ $identifier_value: item[item.identifier],
};
opsResultArray.push(myObj);
}
data[key][`$${CustomAttributeOperationTypes.REMOVE}`] = opsResultArray;
},
- customAttributeAddOperation(key, data, traits) {
+ customAttributeAddOperation(
+ key: string,
+ data: Record<string, Record<string, unknown>>,
+ traits: Record<string, any>,
+ ) {
data[key][`$${CustomAttributeOperationTypes.ADD}`] =
- traits[key][CustomAttributeOperationTypes.ADD];
+ traits[key]?.[CustomAttributeOperationTypes.ADD];
},
};
const BrazeDedupUtility = {
- prepareInputForDedup(inputs) {
- const externalIds = [];
- const aliasIds = [];
+ prepareInputForDedup(inputs: BrazeRouterRequest[]) {
+ const externalIds: string[] = [];
+ const aliasIds: string[] = [];
for (const input of inputs) {
const { message } = input;
const brazeExternalId = getDestinationExternalID(message, 'brazeExternalId');
@@ -191,8 +229,8 @@ const BrazeDedupUtility = {
return { externalIdsToQuery, aliasIdsToQuery };
},
- prepareChunksForDedup(externalIdsToQuery, aliasIdsToQuery) {
- const identifiers = [];
+ prepareChunksForDedup(externalIdsToQuery: string[], aliasIdsToQuery: string[]) {
+ const identifiers: BrazeAliasToIdentify[] = [];
if (externalIdsToQuery.length > 0) {
externalIdsToQuery.forEach((externalId) => {
identifiers.push({
@@ -228,7 +266,16 @@ const BrazeDedupUtility = {
// 'country' and 'language' not needed because it is not billable so we don't use it
];
},
- async doApiLookup(identfierChunks, { destination, metadata }) {
+ async doApiLookup(
+ identfierChunks: BrazeAliasToIdentify[][],
+ context: { destination: BrazeDestination; metadata: Record<string, unknown> },
+ ): Promise<
+ Array<{
+ users: BrazeUser[];
+ failedIdentifiers: string[];
+ }>
+ > {
+ const { destination, metadata } = context;
return Promise.all(
identfierChunks.map(async (ids) => {
const externalIdentifiers = ids.filter((id) => id.external_id);
@@ -264,7 +311,7 @@ const BrazeDedupUtility = {
const failedIdentifiers = [
...externalIdentifiers.map((id) => id.external_id),
...aliasIdentifiers.map((id) => id.alias_name),
- ];
+ ].filter((id): id is string => id !== undefined);
stats.histogram('braze_lookup_failure_identifiers', failedIdentifiers.length, {
http_status: lookUpResponse.status,
destination_id: destination.ID,
@@ -278,7 +325,7 @@ const BrazeDedupUtility = {
destination_id: destination.ID,
},
);
- const { users } = lookUpResponse.response;
+ const { users } = lookUpResponse.response as BrazeUserExportResponse;
return { users: users || [], failedIdentifiers: [] };
}),
);
@@ -291,22 +338,27 @@ const BrazeDedupUtility = {
* @param {*} inputs router transform input events array
* @returns {Promise<{users: Array, failedIdentifiers: Set}>} object containing user objects and failed identifiers
*/
- async doLookup(inputs) {
+ async doLookup(
+ inputs: BrazeRouterRequest[],
+ ): Promise<{ users: BrazeUser[]; failedIdentifiers: Set<string> }> {
const lookupStartTime = new Date();
const { destination, metadata } = inputs[0];
const { externalIdsToQuery, aliasIdsToQuery } = this.prepareInputForDedup(inputs);
- const identfierChunks = this.prepareChunksForDedup(externalIdsToQuery, aliasIdsToQuery);
+ const identfierChunks: BrazeAliasToIdentify[][] = this.prepareChunksForDedup(
+ externalIdsToQuery,
+ aliasIdsToQuery,
+ );
const chunkedResults = await this.doApiLookup(identfierChunks, { destination, metadata });
// Collect all users and failed identifiers from all chunks
- const allUsers = [];
- const failedIdentifiers = new Set();
+ const allUsers: BrazeUser[] = [];
+ const failedIdentifiers = new Set<string>();
chunkedResults.forEach((result) => {
if (result.users) {
allUsers.push(...result.users);
}
if (result.failedIdentifiers) {
- result.failedIdentifiers.forEach((id) => failedIdentifiers.add(id));
+ result.failedIdentifiers.forEach((id: string) => failedIdentifiers.add(id));
}
});
@@ -325,11 +377,11 @@ const BrazeDedupUtility = {
/**
* Updates the user store with the user objects
*
- * @param {*} store
- * @param {*} users
- * @param {*} destinationId
+ * @param store - Map storing user data by identifier
+ * @param users - Array of Braze users from API response
+ * @param destinationId - Destination ID for stats tracking
*/
- updateUserStore(store, users, destinationId) {
+ updateUserStore(store: Map<string, BrazeUser>, users: BrazeUser[], destinationId: string) {
if (isDefinedAndNotNull(users) && Array.isArray(users)) {
users.forEach((user) => {
if (user?.external_id) {
@@ -357,23 +409,23 @@ const BrazeDedupUtility = {
* Returns the user object from the store
* if the user object is not present in the store, it returns undefined
*
- * @param {*} store
- * @param {*} identifier
- * @returns {Object | undefined} user object from the store
+ * @param store - Map storing user data by identifier
+ * @param identifier - User identifier (external_id or alias_name)
+ * @returns User object from the store or undefined
*/
- getUserDataFromStore(store, identifier) {
- return store.get(identifier);
+ getUserDataFromStore(store: Map<string, BrazeUser>, identifier: unknown): BrazeUser | undefined {
+ return store.get(identifier as string);
},
/**
* Deduplicates the user object with the user object from the store
* returns original user object if the user object is not present in the store
*
- * @param {*} userData
- * @param {*} store
- * @returns {Object} user object with deduplicated custom attributes
+ * @param userData - User attributes to deduplicate
+ * @param store - Map storing user data by identifier
+ * @returns Deduplicated user object or null if no changes
*/
- deduplicate(userData, store) {
+ deduplicate(userData: BrazeUserAttributes, store: Map<string, BrazeUser>) {
const excludeKeys = new Set([
'external_id',
'user_alias',
@@ -387,22 +439,22 @@ const BrazeDedupUtility = {
this.getUserDataFromStore(store, user_alias?.alias_name);
if (!storedUserData) {
- store.set(external_id || user_alias, userData);
+ store.set((external_id || user_alias) as string, userData);
return userData;
}
- const customAttributes = storedUserData?.custom_attributes;
+ const customAttributes = storedUserData.custom_attributes;
storedUserData = { ...storedUserData, ...customAttributes };
delete storedUserData.custom_attributes;
- let deduplicatedUserData = {};
+ let deduplicatedUserData: Record<string, unknown> = {};
const keys = Object.keys(userData)
.filter((key) => !excludeKeys.has(key))
.filter((key) => !BRAZE_NON_BILLABLE_ATTRIBUTES.includes(key))
.filter((key) => {
if (isObject(userData[key])) {
return !(
- Object.keys(userData[key]).includes('$add') ||
- Object.keys(userData[key]).includes('$update') ||
- Object.keys(userData[key]).includes('$remove')
+ Object.keys(userData[key] as object).includes('$add') ||
+ Object.keys(userData[key] as object).includes('$update') ||
+ Object.keys(userData[key] as object).includes('$remove')
);
}
return true;
@@ -410,14 +462,15 @@ const BrazeDedupUtility = {
if (keys.length > 0) {
keys.forEach((key) => {
+ const sud = storedUserData;
// ref: https://www.braze.com/docs/user_guide/data_and_analytics/custom_data/custom_attributes/#adding-descriptions
// null is a valid value in braze for unsetting, so we need to compare the values only if the key is present in the stored user data
// in case of keys having null values only compare if the key is present in the stored user data
if (userData[key] === null) {
- if (isDefinedAndNotNull(storedUserData[key])) {
+ if (isDefinedAndNotNull(sud[key])) {
deduplicatedUserData[key] = userData[key];
}
- } else if (!_.isEqual(userData[key], storedUserData[key])) {
+ } else if (!_.isEqual(userData[key], sud[key])) {
deduplicatedUserData[key] = userData[key];
}
});
@@ -439,9 +492,9 @@ const BrazeDedupUtility = {
user_alias,
};
const identifier = external_id || user_alias?.alias_name;
- store.set(identifier, { ...storedUserData, ...deduplicatedUserData });
+ store.set(identifier as string, { ...storedUserData, ...deduplicatedUserData });
- return removeUndefinedValues(deduplicatedUserData);
+ return removeUndefinedValues(deduplicatedUserData) as BrazeUserAttributes;
},
};
@@ -450,16 +503,21 @@ const BrazeDedupUtility = {
* returns original user object if the user object is not present in the store
* if user is duplicate, it returns null
*
- * @param {*} userStore
- * @param {*} payload
- * @param {*} destinationId
- * @param {Set} failedLookupIdentifiers - Set of identifiers that failed to lookup due to API failure
- * @returns
+ * @param userStore - Map storing user data by identifier
+ * @param payload - User attributes payload to deduplicate
+ * @param destinationId - Destination ID for stats tracking
+ * @param failedLookupIdentifiers - Set of identifiers that failed to lookup due to API failure
+ * @returns Deduplicated payload or null if duplicate
*/
-const processDeduplication = (userStore, payload, destinationId, failedLookupIdentifiers) => {
+const processDeduplication = (
+ userStore: Map<string, BrazeUser>,
+ payload: BrazeUserAttributes,
+ destinationId: string,
+ failedLookupIdentifiers: Set<string>,
+) => {
// Check if this event's identifier failed to lookup due to API failure
const identifier = payload.external_id || payload.user_alias?.alias_name;
- if (failedLookupIdentifiers && failedLookupIdentifiers.has(identifier)) {
+ if (failedLookupIdentifiers && identifier && failedLookupIdentifiers.has(identifier)) {
stats.increment('braze_dedup_skipped_due_to_lookup_failure_count', {
destination_id: destinationId,
});
@@ -468,7 +526,9 @@ const processDeduplication = (userStore, payload, destinationId, failedLookupIde
const dedupedAttributePayload = BrazeDedupUtility.deduplicate(payload, userStore);
if (
isDefinedAndNotNullAndNotEmpty(dedupedAttributePayload) &&
- Object.keys(dedupedAttributePayload).some((key) => !['external_id', 'user_alias'].includes(key))
+ Object.keys(dedupedAttributePayload as BrazeUserAttributes).some(
+ (key) => !['external_id', 'user_alias'].includes(key),
+ )
) {
stats.increment('braze_deduped_users_count', { destination_id: destinationId });
return dedupedAttributePayload;
@@ -477,37 +537,61 @@ const processDeduplication = (userStore, payload, destinationId, failedLookupIde
return null;
};
-function prepareGroupAndAliasBatch(arrayChunks, responseArray, destination, type) {
+function prepareGroupAndAliasBatch({
+ arrayChunks,
+ responseArray,
+ destination,
+ type,
+}:
+ | {
+ arrayChunks: BrazeSubscriptionGroup[][];
+ responseArray: unknown[];
+ destination: BrazeDestination;
+ type: 'subscription';
+ }
+ | {
+ arrayChunks: BrazeMergeUpdate[][];
+ responseArray: unknown[];
+ destination: BrazeDestination;
+ type: 'merge';
+ }) {
const headers = {
'Content-Type': JSON_MIME_TYPE,
Accept: JSON_MIME_TYPE,
Authorization: `Bearer ${destination.Config.restApiKey}`,
};
- for (const chunk of arrayChunks) {
- const response = defaultRequestConfig();
- if (type === 'merge') {
+ // Type narrowing: Check type BEFORE the loop so TypeScript can narrow arrayChunks
+ if (type === 'merge') {
+ // TypeScript now knows arrayChunks is BrazeMergeUpdate[][]
+ for (const chunk of arrayChunks) {
+ const response = defaultRequestConfig();
const { endpoint, path } = getAliasMergeEndPoint(getEndpointFromConfig(destination));
response.endpoint = endpoint;
response.endpointPath = path;
- const merge_updates = chunk;
response.body.JSON = removeUndefinedAndNullValues({
- merge_updates,
+ merge_updates: chunk,
+ });
+ responseArray.push({
+ ...response,
+ headers,
});
- } else if (type === 'subscription') {
+ }
+ } else {
+ // TypeScript now knows arrayChunks is BrazeSubscriptionGroup[][]
+ for (const chunk of arrayChunks) {
+ const response = defaultRequestConfig();
const { endpoint, path } = getSubscriptionGroupEndPoint(getEndpointFromConfig(destination));
response.endpoint = endpoint;
response.endpointPath = path;
- const subscription_groups = chunk;
- // maketool transformed event
- logger.info(`braze subscription chunk ${JSON.stringify(subscription_groups)}`);
- stats.gauge('braze_batch_subscription_size', subscription_groups.length, {
+ stats.gauge('braze_batch_subscription_size', chunk.length, {
destination_id: destination.ID,
});
// Deduplicate the subscription groups before constructing the response body
- const deduplicatedSubscriptionGroups = combineSubscriptionGroups(subscription_groups);
+ // No type casting needed - TypeScript knows chunk is BrazeSubscriptionGroup[]
+ const deduplicatedSubscriptionGroups = combineSubscriptionGroups(chunk);
stats.gauge('braze_batch_subscription_combined_size', deduplicatedSubscriptionGroups.length, {
destination_id: destination.ID,
@@ -516,26 +600,36 @@ function prepareGroupAndAliasBatch(arrayChunks, responseArray, destination, type
response.body.JSON = removeUndefinedAndNullValues({
subscription_groups: deduplicatedSubscriptionGroups,
});
+ responseArray.push({
+ ...response,
+ headers,
+ });
}
- responseArray.push({
- ...response,
- headers,
- });
}
}
-const createTrackChunk = () => ({
+const createTrackChunk = (): TrackChunk => ({
attributes: [],
events: [],
purchases: [],
- externalIds: new Set(),
+ externalIds: new Set<string>(),
});
-const batchForTrackAPI = (attributesArray, eventsArray, purchasesArray) => {
- const allItems = [];
+type AllItems = {
+ data: BrazeUserAttributes | BrazeEvent | BrazePurchase;
+ type: string;
+ externalId?: string;
+};
+
+const batchForTrackAPI = (
+ attributesArray: BrazeUserAttributes[],
+ eventsArray: BrazeEvent[],
+ purchasesArray: BrazePurchase[],
+) => {
+ const allItems: AllItems[] = [];
const maxLength = Math.max(attributesArray.length, eventsArray.length, purchasesArray.length);
- const addItem = (item, type) => {
+ const addItem = (item: AllItems['data'], type: string) => {
if (item) {
allItems.push({
data: item,
@@ -545,10 +639,18 @@ const batchForTrackAPI = (attributesArray, eventsArray, purchasesArray) => {
}
};
- const canAddToChunk = (item, chunk) => {
+ const canAddToChunk = (
+ item: AllItems,
+ chunk: {
+ externalIds: Set<string>;
+ attributes: unknown[];
+ events: unknown[];
+ purchases: unknown[];
+ },
+ ) => {
const { type, externalId } = item;
return (
- (chunk.externalIds.has(externalId) ||
+ ((externalId && chunk.externalIds.has(externalId)) ||
chunk.externalIds.size < TRACK_BRAZE_MAX_EXTERNAL_ID_COUNT) &&
chunk[type].length < TRACK_BRAZE_MAX_REQ_COUNT
);
@@ -562,16 +664,16 @@ const batchForTrackAPI = (attributesArray, eventsArray, purchasesArray) => {
}
const sortedItems = _.sortBy(allItems, 'externalId');
let currentChunk = createTrackChunk();
- const trackChunks = [];
+ const trackChunks: ReturnType<typeof createTrackChunk>[] = [];
for (const item of sortedItems) {
if (canAddToChunk(item, currentChunk)) {
currentChunk[item.type].push(item.data);
- currentChunk.externalIds.add(item.externalId);
+ currentChunk.externalIds.add(item.externalId!);
} else {
trackChunks.push(currentChunk);
currentChunk = createTrackChunk();
currentChunk[item.type].push(item.data);
- currentChunk.externalIds.add(item.externalId);
+ currentChunk.externalIds.add(item.externalId!);
}
}
if (currentChunk.externalIds.size > 0) {
@@ -580,8 +682,66 @@ const batchForTrackAPI = (attributesArray, eventsArray, purchasesArray) => {
return trackChunks;
};
-const cleanTrackChunk = ({ attributes, events, purchases }) => {
- const cleanChunk = {};
+// braze batching as per new MAU plan
+const batchForTrackAPIV2 = (
+ attributesArray: BrazeUserAttributes[],
+ eventsArray: BrazeEvent[],
+ purchasesArray: BrazePurchase[],
+) => {
+ // Collect all items with their types, filtering out null/undefined
+ const allItems: AllItems[] = [
+ ...attributesArray
+ .filter((item) => isDefinedAndNotNull(item))
+ .map((item) => ({
+ data: item,
+ type: 'attributes',
+ externalId: item.external_id,
+ })),
+ ...eventsArray
+ .filter((item) => isDefinedAndNotNull(item))
+ .map((item) => ({ data: item, type: 'events', externalId: item.external_id })),
+ ...purchasesArray
+ .filter((item) => isDefinedAndNotNull(item))
+ .map((item) => ({
+ data: item,
+ type: 'purchases',
+ externalId: item.external_id,
+ })),
+ ];
+
+ const sortedItems: AllItems[] = _.sortBy(allItems, 'externalId');
+ const trackChunks: ReturnType<typeof createTrackChunk>[] = [];
+ let currentChunk = createTrackChunk();
+
+ const getChunkSize = (chunk: ReturnType<typeof createTrackChunk>) =>
+ chunk.attributes.length + chunk.events.length + chunk.purchases.length;
+
+ const addItemToChunk = (item: AllItems, chunk: ReturnType<typeof createTrackChunk>) => {
+ chunk[item.type].push(item.data);
+ };
+
+ for (const item of sortedItems) {
+ if (getChunkSize(currentChunk) === TRACK_BRAZE_MAX_REQ_COUNT) {
+ trackChunks.push(currentChunk);
+ currentChunk = createTrackChunk();
+ }
+ addItemToChunk(item, currentChunk);
+ }
+
+ if (getChunkSize(currentChunk) > 0) {
+ trackChunks.push(currentChunk);
+ }
+
+ return trackChunks;
+};
+
+const cleanTrackChunk = (chunk: {
+ attributes: unknown[];
+ events: unknown[];
+ purchases: unknown[];
+}) => {
+ const { attributes, events, purchases } = chunk;
+ const cleanChunk: Record<string, unknown> = {};
if (attributes.length > 0) {
cleanChunk.attributes = attributes;
}
@@ -594,41 +754,81 @@ const cleanTrackChunk = ({ attributes, events, purchases }) => {
return cleanChunk;
};
-const addTrackStats = (chunk, destination) => {
+const addTrackStats = (
+ chunk: { attributes?: unknown[]; events?: unknown[]; purchases?: unknown[] },
+ destination: BrazeDestination,
+) => {
const { attributes, events, purchases } = chunk;
+ let totalCount = 0;
if (attributes) {
- stats.gauge('braze_batch_attributes_pack_size', attributes.length, {
+ totalCount += attributes.length;
+ stats.histogram('braze_batch_attributes_pack_size', attributes.length, {
destination_id: destination.ID,
});
}
if (events) {
- stats.gauge('braze_batch_events_pack_size', events.length, {
+ totalCount += events.length;
+ stats.histogram('braze_batch_events_pack_size', events.length, {
destination_id: destination.ID,
});
}
if (purchases) {
- stats.gauge('braze_batch_purchase_pack_size', purchases.length, {
+ totalCount += purchases.length;
+ stats.histogram('braze_batch_purchase_pack_size', purchases.length, {
destination_id: destination.ID,
});
}
+ stats.histogram('braze_batch_total_pack_size', totalCount, {
+ destination_id: destination.ID,
+ });
};
-const processBatch = (transformedEvents) => {
- const { destination } = transformedEvents[0];
- const attributesArray = [];
- const eventsArray = [];
- const purchaseArray = [];
- const successMetadata = [];
- const failureResponses = [];
- const filteredResponses = [];
- const subscriptionsArray = [];
- const mergeUsersArray = [];
+let mauWorkspaceSkipIds: string | Map<string, boolean> = 'ALL';
+if (isDefinedAndNotNull(process.env.DEST_BRAZE_MAU_WORKSPACE_IDS_SKIP_LIST)) {
+ const skipList = process.env.DEST_BRAZE_MAU_WORKSPACE_IDS_SKIP_LIST!;
+ switch (skipList) {
+ case 'ALL':
+ mauWorkspaceSkipIds = 'ALL';
+ break;
+ case 'NONE':
+ mauWorkspaceSkipIds = 'NONE';
+ break;
+ default:
+ mauWorkspaceSkipIds = new Map(skipList.split(',').map((s) => [s.trim(), true]));
+ }
+}
+
+const isWorkspaceOnMauPlan = (workspaceId) => {
+ const environmentVariable = mauWorkspaceSkipIds;
+ switch (environmentVariable) {
+ case 'ALL':
+ return false;
+ case 'NONE':
+ return true;
+ default: {
+      return !(mauWorkspaceSkipIds as Map<string, boolean>).has(workspaceId);
+ }
+ }
+};
+
+const processBatch = (transformedEvents: BrazeTransformedEvent[]) => {
+ const { destination, metadata } = transformedEvents[0];
+ const workspaceId = metadata?.[0]?.workspaceId || '';
+ const dest = destination;
+ const attributesArray: BrazeUserAttributes[] = [];
+ const eventsArray: BrazeEvent[] = [];
+ const purchaseArray: BrazePurchase[] = [];
+  const successMetadata: Partial<Metadata>[] = [];
+ const failureResponses: BrazeTransformedEvent[] = [];
+ const filteredResponses: BrazeTransformedEvent[] = [];
+ const subscriptionsArray: BrazeSubscriptionGroup[] = [];
+ const mergeUsersArray: BrazeMergeUpdate[] = [];
for (const transformedEvent of transformedEvents) {
- if (!isHttpStatusSuccess(transformedEvent?.statusCode)) {
+ if (!isHttpStatusSuccess(transformedEvent.statusCode)) {
failureResponses.push(transformedEvent);
- } else if (transformedEvent?.statusCode === HTTP_STATUS_CODES.FILTER_EVENTS) {
+ } else if (transformedEvent.statusCode === HTTP_STATUS_CODES.FILTER_EVENTS) {
filteredResponses.push(transformedEvent);
- } else if (transformedEvent?.batchedRequest?.body?.JSON) {
+ } else if (transformedEvent.batchedRequest?.body?.JSON) {
const { attributes, events, purchases, subscription_groups, merge_updates } =
transformedEvent.batchedRequest.body.JSON;
if (Array.isArray(attributes)) {
@@ -649,24 +849,30 @@ const processBatch = (transformedEvents) => {
mergeUsersArray.push(...merge_updates);
}
- successMetadata.push(...transformedEvent.metadata);
+ if (transformedEvent.metadata) {
+ successMetadata.push(...transformedEvent.metadata);
+ }
}
}
- const trackChunks = batchForTrackAPI(attributesArray, eventsArray, purchaseArray);
+ const isWorkspaceOnMauPlanFlag = isWorkspaceOnMauPlan(workspaceId);
+ const trackChunks = isWorkspaceOnMauPlanFlag
+ ? batchForTrackAPIV2(attributesArray, eventsArray, purchaseArray)
+ : batchForTrackAPI(attributesArray, eventsArray, purchaseArray);
const subscriptionArrayChunks = _.chunk(subscriptionsArray, SUBSCRIPTION_BRAZE_MAX_REQ_COUNT);
const mergeUsersArrayChunks = _.chunk(mergeUsersArray, ALIAS_BRAZE_MAX_REQ_COUNT);
- const responseArray = [];
- const finalResponse = [];
- const headers = {
+ const responseArray: BrazeBatchRequest[] = [];
+ const finalResponse: BrazeBatchResponse[] = [];
+ const headers: BrazeBatchHeaders = {
'Content-Type': JSON_MIME_TYPE,
Accept: JSON_MIME_TYPE,
- Authorization: `Bearer ${destination.Config.restApiKey}`,
+ Authorization: `Bearer ${dest.Config.restApiKey}`,
};
const { endpoint, path } = getTrackEndPoint(getEndpointFromConfig(destination));
for (const chunk of trackChunks) {
- const { attributes, events, purchases } = cleanTrackChunk(chunk);
+ const cleanedChunk = cleanTrackChunk(chunk);
+ const { attributes, events, purchases } = cleanedChunk;
addTrackStats(chunk, destination);
const response = defaultRequestConfig();
@@ -684,8 +890,18 @@ const processBatch = (transformedEvents) => {
});
}
- prepareGroupAndAliasBatch(subscriptionArrayChunks, responseArray, destination, 'subscription');
- prepareGroupAndAliasBatch(mergeUsersArrayChunks, responseArray, destination, 'merge');
+ prepareGroupAndAliasBatch({
+ arrayChunks: subscriptionArrayChunks,
+ responseArray,
+ destination,
+ type: 'subscription',
+ });
+ prepareGroupAndAliasBatch({
+ arrayChunks: mergeUsersArrayChunks,
+ responseArray,
+ destination,
+ type: 'merge',
+ });
if (successMetadata.length > 0) {
finalResponse.push({
@@ -722,8 +938,8 @@ const processBatch = (transformedEvents) => {
}
Ref: https://www.braze.com/docs/api/identifier_types/?tab=app%20ids
*/
-const addAppId = (payload, message) => {
- const integrationsObj = getIntegrationsObj(message, 'BRAZE');
+const addAppId = (payload: Record<string, unknown>, message: Record<string, unknown>) => {
+ const integrationsObj = getIntegrationsObj(message, DESTINATION.toUpperCase() as any);
if (integrationsObj?.appId) {
const { appId: appIdValue } = integrationsObj;
return {
@@ -734,7 +950,7 @@ const addAppId = (payload, message) => {
return { ...payload };
};
-function setExternalId(payload, message) {
+function setExternalId(payload: Record<string, unknown>, message: Record<string, unknown>) {
const externalId = getDestinationExternalID(message, 'brazeExternalId') || message.userId;
if (externalId) {
payload.external_id = externalId;
@@ -742,8 +958,8 @@ function setExternalId(payload, message) {
return payload;
}
-function setAliasObject(payload, message) {
- const integrationsObj = getIntegrationsObj(message, 'BRAZE');
+function setAliasObject(payload: Record<string, unknown>, message: RudderBrazeMessage) {
+ const integrationsObj = getIntegrationsObj(message, DESTINATION.toUpperCase() as any);
if (
isDefinedAndNotNull(integrationsObj?.alias?.alias_name) &&
isDefinedAndNotNull(integrationsObj?.alias?.alias_label)
@@ -762,7 +978,7 @@ function setAliasObject(payload, message) {
return payload;
}
-function setExternalIdOrAliasObject(payload, message) {
+function setExternalIdOrAliasObject(payload: Record<string, unknown>, message: RudderBrazeMessage) {
const userId = getFieldValueFromMessage(message, 'userIdOnly');
if (userId || getDestinationExternalID(message, 'brazeExternalId')) {
return setExternalId(payload, message);
@@ -773,7 +989,13 @@ function setExternalIdOrAliasObject(payload, message) {
return setAliasObject(payload, message);
}
-function addMandatoryPurchaseProperties(productId, price, currencyCode, quantity, timestamp) {
+function addMandatoryPurchaseProperties(
+ productId: string,
+ price: number,
+ currencyCode: string,
+ quantity: number,
+ timestamp: unknown,
+) {
return {
product_id: productId,
price,
@@ -783,7 +1005,7 @@ function addMandatoryPurchaseProperties(productId, price, currencyCode, quantity
};
}
-function getPurchaseObjs(message, config) {
+function getPurchaseObjs(message: RudderBrazeMessage, config: BrazeDestinationConfig) {
// ref:https://www.braze.com/docs/api/objects_filters/purchase_object/
const validateForPurchaseEvent = () => {
const { properties } = message;
@@ -865,19 +1087,23 @@ function getPurchaseObjs(message, config) {
};
validateForPurchaseEvent();
- const { products, currency: currencyCode } = message.properties;
+ // After validation, we know properties exists and has products
+ const { products, currency: currencyCode } = message.properties!;
const timestamp = getFieldValueFromMessage(message, 'timestamp');
- const purchaseObjs = [];
+ const purchaseObjs: unknown[] = [];
// we have to make a separate purchase object for each product
- products.forEach((product) => {
+ // After validation, products is guaranteed to exist and be a non-empty array
+ products!.forEach((product) => {
const productId = product.product_id || product.sku;
const { price, quantity, currency: prodCur } = product;
- let purchaseObj = addMandatoryPurchaseProperties(
+ // Convert to string first to handle any type (number, string, etc.)
+ // then parse to ensure correct type for Braze API
+      let purchaseObj: Record<string, unknown> = addMandatoryPurchaseProperties(
String(productId),
- Number.parseFloat(price),
- currencyCode || prodCur,
- Number.parseInt(quantity, 10),
+ Number.parseFloat(String(price)),
+ String(currencyCode || prodCur),
+ Number.parseInt(String(quantity), 10),
timestamp,
);
const extraProperties = _.omit(product, BRAZE_PURCHASE_STANDARD_PROPERTIES);
@@ -891,7 +1117,12 @@ function getPurchaseObjs(message, config) {
return purchaseObjs;
}
-const collectStatsForAliasFailure = (brazeResponse, destinationId) => {
+const collectStatsForAliasFailure = (
+ brazeResponse: {
+ aliases_processed?: number;
+ },
+ destinationId: string,
+) => {
/**
* Braze Response for Alias failure
* {
@@ -925,11 +1156,11 @@ const collectStatsForAliasFailure = (brazeResponse, destinationId) => {
}
};
-const collectStatsForAliasMissConfigurations = (destinationId) => {
+const collectStatsForAliasMissConfigurations = (destinationId: string) => {
stats.increment('braze_alias_missconfigured_count', { destination_id: destinationId });
};
-function handleReservedProperties(props) {
+function handleReservedProperties(props: Record<string, unknown>): Record<string, unknown> {
if (typeof props !== 'object') {
throw new InstrumentationError('Invalid event properties');
}
@@ -939,7 +1170,7 @@ function handleReservedProperties(props) {
return _.omit(props, reserved);
}
-module.exports = {
+export {
BrazeDedupUtility,
CustomAttributeOperationUtil,
getEndpointFromConfig,
@@ -957,4 +1188,5 @@ module.exports = {
handleReservedProperties,
combineSubscriptionGroups,
batchForTrackAPI,
+ batchForTrackAPIV2,
};
diff --git a/src/v0/destinations/campaign_manager/config.js b/src/v0/destinations/campaign_manager/config.js
index 5ea1972a847..7852a1fdebe 100644
--- a/src/v0/destinations/campaign_manager/config.js
+++ b/src/v0/destinations/campaign_manager/config.js
@@ -1,6 +1,6 @@
const { getMappingConfig } = require('../../util');
-const BASE_URL = 'https://dfareporting.googleapis.com/dfareporting/v4/userprofiles';
+const BASE_URL = 'https://dfareporting.googleapis.com/dfareporting/v5/userprofiles';
const ConfigCategories = {
TRACK: {
diff --git a/src/v0/destinations/criteo_audience/README.md b/src/v0/destinations/criteo_audience/README.md
index 1cb6fbff4a9..c21ce54ea33 100644
--- a/src/v0/destinations/criteo_audience/README.md
+++ b/src/v0/destinations/criteo_audience/README.md
@@ -67,7 +67,7 @@ Where `[op]` can be either `add` or `remove` to specify the operation type.
### Endpoints
-- **Audience Management**: `https://api.criteo.com/2025-04/audiences/{audienceId}/contactlist` - HTTP Method: PATCH
+- **Audience Management**: `https://api.criteo.com/2025-10/audiences/{audienceId}/contactlist` - HTTP Method: PATCH
### Authentication
@@ -78,10 +78,11 @@ Where `[op]` can be either `add` or `remove` to specify the operation type.
### Rate Limits
- **Rate Limiting**: 429 status code indicates rate limit exceeded
-- **App Level Limit**: Maximum 250 requests per minute
+- **RudderStack uses Authorization Code apps** – 10 calls/min per account per consent granter (baseline; auto-scales with number of accounts). See [Criteo Authorization Code Setup](https://developers.criteo.com/marketing-solutions/docs/authorization-code-setup) and [Rate Limits](https://developers.criteo.com/marketing-solutions/v2025.10/docs/rate-limits).
+- **Client Credentials apps**: 250 calls/min (app-level, unchanged in 2025-10)
- **Response Headers**:
- `x-ratelimit-limit`: Number of calls your App can perform
- - `x-ratelimit-remaining`: Number of calls remaining (resets to 250 every minute)
+ - `x-ratelimit-remaining`: Number of calls remaining (reset value varies by app type)
- `x-ratelimit-reset`: Timestamp for rate limit reset
- **Handling Strategy**: Automatic retry with exponential backoff
- **Batch Size**: Maximum 50,000 identifiers per request
diff --git a/src/v0/destinations/criteo_audience/config.js b/src/v0/destinations/criteo_audience/config.js
index 3d56c491dca..86ee0e7f56e 100644
--- a/src/v0/destinations/criteo_audience/config.js
+++ b/src/v0/destinations/criteo_audience/config.js
@@ -1,4 +1,4 @@
-const version = '2025-04';
+const version = '2025-10';
const BASE_ENDPOINT = `https://api.criteo.com/${version}/`;
const operation = ['add', 'remove'];
// https://developers.criteo.com/marketing-solutions/docs/audience-segments#manage-contact-lists
diff --git a/src/v0/destinations/customerio_audience/config.ts b/src/v0/destinations/customerio_audience/config.ts
index c6acbcdb6e4..994523c0f88 100644
--- a/src/v0/destinations/customerio_audience/config.ts
+++ b/src/v0/destinations/customerio_audience/config.ts
@@ -2,4 +2,8 @@ export const MAX_ITEMS = 1000;
export const DEFAULT_ID_TYPE = 'id';
-export const BASE_ENDPOINT = 'https://track.customer.io/api/v1/segments';
+const US_BASE_ENDPOINT = 'https://track.customer.io/api/v1/segments';
+const EU_BASE_ENDPOINT = 'https://track-eu.customer.io/api/v1/segments';
+
+export const getBaseEndpoint = (region?: string): string =>
+ region === 'EU' ? EU_BASE_ENDPOINT : US_BASE_ENDPOINT;
diff --git a/src/v0/destinations/customerio_audience/type.ts b/src/v0/destinations/customerio_audience/type.ts
index 9197dd62595..10b066f5cde 100644
--- a/src/v0/destinations/customerio_audience/type.ts
+++ b/src/v0/destinations/customerio_audience/type.ts
@@ -45,6 +45,8 @@ export const CustomerIODestinationConfigSchema = z
apiKey: z.string(),
appApiKey: z.string(),
siteId: z.string(),
+ // keeping it optional for backward compatibility
+ region: z.enum(['US', 'EU']).optional(),
})
.passthrough();
diff --git a/src/v0/destinations/customerio_audience/utils.test.ts b/src/v0/destinations/customerio_audience/utils.test.ts
index 2922d84bf6d..17252eee325 100644
--- a/src/v0/destinations/customerio_audience/utils.test.ts
+++ b/src/v0/destinations/customerio_audience/utils.test.ts
@@ -41,14 +41,36 @@ describe('utils', () => {
length: 0,
},
},
+ {
+ name: 'should use EU base URL when region is EU',
+ input: {
+ insertOrUpdateRespList: [{ payload: { ids: ['user1'] }, metadata: { sourceId: '1' } }],
+ deleteRespList: [{ payload: { ids: ['user2'] }, metadata: { sourceId: '2' } }],
+ },
+ destinationOverride: {
+ Config: {
+ siteId: 'test-site',
+ apiKey: 'test-key',
+ region: 'EU',
+ },
+ },
+ expected: {
+ length: 2,
+ firstEndpoint: 'https://track-eu.customer.io/api/v1/segments/123/add_customers',
+ secondEndpoint: 'https://track-eu.customer.io/api/v1/segments/123/remove_customers',
+ },
+ },
];
- testCases.forEach(({ name, input, expected }) => {
+ testCases.forEach(({ name, input, expected, destinationOverride }) => {
test(name, () => {
+ const destination = destinationOverride
+ ? { ...mockDestination, ...destinationOverride }
+ : mockDestination;
const result = batchResponseBuilder(
input.insertOrUpdateRespList,
input.deleteRespList,
- mockDestination as any,
+ destination as any,
mockConnection as any,
);
diff --git a/src/v0/destinations/customerio_audience/utils.ts b/src/v0/destinations/customerio_audience/utils.ts
index be5f7010944..a259ef424fb 100644
--- a/src/v0/destinations/customerio_audience/utils.ts
+++ b/src/v0/destinations/customerio_audience/utils.ts
@@ -5,7 +5,7 @@ import {
InstrumentationError,
} from '@rudderstack/integrations-lib';
import { BatchUtils } from '@rudderstack/workflow-engine';
-import { BASE_ENDPOINT, DEFAULT_ID_TYPE, MAX_ITEMS } from './config';
+import { getBaseEndpoint, DEFAULT_ID_TYPE, MAX_ITEMS } from './config';
import {
CustomerIOConnection,
CustomerIODestination,
@@ -107,17 +107,18 @@ export const batchResponseBuilder = (
connection: CustomerIOConnection,
): CustomerIOBatchResponse[] => {
const segmentId = getSegmentId(connection);
+ const baseEndpoint = getBaseEndpoint(destination.Config?.region);
const insertResponses = processBatch(
insertOrUpdateRespList,
- `${BASE_ENDPOINT}/${segmentId}/add_customers`,
+ `${baseEndpoint}/${segmentId}/add_customers`,
destination,
connection,
);
const deleteResponses = processBatch(
deleteRespList,
- `${BASE_ENDPOINT}/${segmentId}/remove_customers`,
+ `${baseEndpoint}/${segmentId}/remove_customers`,
destination,
connection,
);
diff --git a/src/v0/destinations/fb_custom_audience/config.js b/src/v0/destinations/fb_custom_audience/config.ts
similarity index 59%
rename from src/v0/destinations/fb_custom_audience/config.js
rename to src/v0/destinations/fb_custom_audience/config.ts
index ae627b5406b..f4bf5902df6 100644
--- a/src/v0/destinations/fb_custom_audience/config.js
+++ b/src/v0/destinations/fb_custom_audience/config.ts
@@ -2,11 +2,11 @@ const BASE_URL = 'https://graph.facebook.com/v23.0';
const ENDPOINT_PATH = 'users';
-function getEndPoint(audienceId) {
+function getEndPoint(audienceId: string): string {
return `${BASE_URL}/${audienceId}/${ENDPOINT_PATH}`;
}
-const schemaFields = [
+const schemaFields: readonly string[] = [
'EXTERN_ID',
'EMAIL',
'PHONE',
@@ -25,7 +25,7 @@ const schemaFields = [
'LOOKALIKE_VALUE',
];
-const typeFields = [
+const typeFields: readonly string[] = [
'UNKNOWN',
'FILE_IMPORTED',
'EVENT_BASED',
@@ -36,7 +36,7 @@ const typeFields = [
'HOUSEHOLD_AUDIENCE',
];
-const subTypeFields = [
+const subTypeFields: readonly string[] = [
'ANYTHING',
'NOTHING',
'HASHES',
@@ -93,6 +93,8 @@ const subTypeFields = [
'DATA_FILE',
];
+const DESTINATION = 'fb_custom_audience';
+
const USER_ADD = 'add';
const USER_DELETE = 'remove';
// https://developers.facebook.com/docs/marketing-api/audiences/guides/custom-audiences/
@@ -100,8 +102,43 @@ const MAX_USER_COUNT = 10000;
/* No official Documentation is available for this but using trial
and error method we found that 65000 bytes is the maximum payload allowed size but we are 60000 just to be sure batching is done properly
*/
-const maxPayloadSize = 60000; // bytes
-module.exports = {
+const DEFAULT_MAX_PAYLOAD_SIZE = 60000; // bytes
+
+/**
+ * Returns the maximum payload size in bytes for FB Custom Audience batching.
+ * Can be overridden per workspace via env var FB_CUSTOM_AUDIENCE_MAX_PAYLOAD_SIZE_,
+ * or globally via FB_CUSTOM_AUDIENCE_MAX_PAYLOAD_SIZE. Defaults to 60000.
+ */
+function getMaxPayloadSize(workspaceId: string): number {
+ if (workspaceId) {
+ const workspaceVal = Number.parseInt(
+ process.env[`FB_CUSTOM_AUDIENCE_MAX_PAYLOAD_SIZE_${workspaceId}`] ?? '',
+ 10,
+ );
+ if (!Number.isNaN(workspaceVal) && workspaceVal > 0) {
+ return workspaceVal;
+ }
+ }
+ const globalVal = Number.parseInt(process.env.FB_CUSTOM_AUDIENCE_MAX_PAYLOAD_SIZE ?? '', 10);
+ if (!Number.isNaN(globalVal) && globalVal > 0) {
+ return globalVal;
+ }
+ return DEFAULT_MAX_PAYLOAD_SIZE;
+}
+
+/**
+ * Whether to reject invalid field values (e.g., malformed emails, invalid country codes)
+ * by replacing them with empty strings. When disabled, invalid values are passed through as-is.
+ *
+ * Controlled via env var: FB_CUSTOM_AUDIENCE_REJECT_INVALID_FIELDS=true
+ * Default: false
+ */
+function isRejectInvalidFieldsEnabled(): boolean {
+ return process.env.FB_CUSTOM_AUDIENCE_REJECT_INVALID_FIELDS === 'true';
+}
+
+export {
+ DESTINATION,
ENDPOINT_PATH,
getEndPoint,
schemaFields,
@@ -110,5 +147,6 @@ module.exports = {
MAX_USER_COUNT,
typeFields,
subTypeFields,
- maxPayloadSize,
+ getMaxPayloadSize,
+ isRejectInvalidFieldsEnabled,
};
diff --git a/src/v0/destinations/fb_custom_audience/docs/businesslogic.md b/src/v0/destinations/fb_custom_audience/docs/businesslogic.md
index 5f53885e48e..9b60b5a41df 100644
--- a/src/v0/destinations/fb_custom_audience/docs/businesslogic.md
+++ b/src/v0/destinations/fb_custom_audience/docs/businesslogic.md
@@ -56,15 +56,15 @@ Facebook Custom Audience supports the following user identifier fields:
#### Name Fields (LN, FN, FI)
-- **LN/FN**: Remove special characters except `#$%&'*+/`, convert to lowercase
-- **FI**: Remove special characters except `!"#$%&'()*+,-./`, convert to lowercase
+- **LN/FN**: Trim, lowercase, remove ASCII punctuation (`` !"#$%&'()*+,-./:;<=>?@[\]^_`{|}~ `` — including the backtick). Spaces, digits, accented letters, and non-ASCII (UTF-8) characters are preserved.
+- **FI**: Trim, lowercase, remove special characters except `!"#$%&'()*+,-./`
- **Hashing**: SHA-256 applied
#### Geographic Fields (CT, ST, ZIP, COUNTRY)
-- **CT/ST**: Remove non-alphabetic characters, remove spaces, lowercase
-- **ZIP**: Remove spaces, lowercase
-- **COUNTRY**: Lowercase
+- **CT/ST**: Trim, remove non-alphabetic characters, remove spaces, lowercase
+- **ZIP**: Trim, remove spaces and dashes, lowercase
+- **COUNTRY**: Trim, lowercase
- **Hashing**: SHA-256 applied
#### Special Fields
diff --git a/src/v0/destinations/fb_custom_audience/networkHandler.js b/src/v0/destinations/fb_custom_audience/networkHandler.js
deleted file mode 100644
index 0ea7aff7da6..00000000000
--- a/src/v0/destinations/fb_custom_audience/networkHandler.js
+++ /dev/null
@@ -1,6 +0,0 @@
-const { networkHandler, errorResponseHandler } = require('../../util/facebookUtils/networkHandler');
-
-module.exports = {
- networkHandler,
- errorResponseHandler,
-};
diff --git a/src/v0/destinations/fb_custom_audience/networkHandler.ts b/src/v0/destinations/fb_custom_audience/networkHandler.ts
new file mode 100644
index 00000000000..2818463844c
--- /dev/null
+++ b/src/v0/destinations/fb_custom_audience/networkHandler.ts
@@ -0,0 +1 @@
+export { networkHandler, errorResponseHandler } from '../../util/facebookUtils/networkHandler';
diff --git a/src/v0/destinations/fb_custom_audience/recordTransform.js b/src/v0/destinations/fb_custom_audience/recordTransform.ts
similarity index 63%
rename from src/v0/destinations/fb_custom_audience/recordTransform.js
rename to src/v0/destinations/fb_custom_audience/recordTransform.ts
index ec93a80c137..4601f2b5962 100644
--- a/src/v0/destinations/fb_custom_audience/recordTransform.js
+++ b/src/v0/destinations/fb_custom_audience/recordTransform.ts
@@ -1,25 +1,34 @@
/* eslint-disable no-const-assign */
-const lodash = require('lodash');
-const {
+import lodash from 'lodash';
+import {
InstrumentationError,
ConfigurationError,
groupByInBatches,
forEachInBatches,
- mapInBatches,
-} = require('@rudderstack/integrations-lib');
-const { schemaFields, MAX_USER_COUNT } = require('./config');
-const stats = require('../../../util/stats');
-const {
+} from '@rudderstack/integrations-lib';
+import type { Metadata } from '../../../types';
+import type {
+ FbCustomAudienceDestination,
+ FbCustomAudiencePayload,
+ PrepareParams,
+ WrappedResponse,
+ RecordPrepareConfig,
+ FbRecordEvent,
+} from './types';
+import { schemaFields, MAX_USER_COUNT } from './config';
+import {
getDestinationExternalIDInfoForRetl,
- isDefinedAndNotNullAndNotEmpty,
checkSubsetOfArray,
returnArrayOfSubarrays,
getSuccessRespEvents,
+ getErrorRespEvents,
+ generateErrorObject,
isEventSentByVDMV2Flow,
isEventSentByVDMV1Flow,
-} = require('../../util');
-const { getErrorResponse, createFinalResponse } = require('../../util/recordUtils');
-const {
+ isDefinedAndNotNullAndNotEmpty,
+} from '../../util';
+import { getErrorResponse, createFinalResponse } from '../../util/recordUtils';
+import {
ensureApplicableFormat,
getUpdatedDataElement,
getSchemaForEventMappedToDest,
@@ -27,7 +36,7 @@ const {
responseBuilderSimple,
getDataSource,
generateAppSecretProof,
-} = require('./util');
+} from './util';
/**
* Processes a single record and updates the data element.
@@ -37,20 +46,39 @@ const {
* @param {boolean} disableFormat - Whether formatting is disabled.
* @returns {Object} - The processed data element and metadata.
*/
-const processRecord = (record, userSchema, isHashRequired, disableFormat) => {
- const { fields } = record.message;
- let dataElement = [];
+const processRecord = (
+ record: FbRecordEvent,
+ userSchema: string[],
+ isHashRequired: boolean,
+ disableFormat: boolean | undefined,
+ workspaceId: string,
+ destinationId: string,
+): { metadata: Metadata } & ({ dataElement: unknown[] } | { error: string }) => {
+ const fields = record.message.fields!;
+ let dataElement: unknown[] = [];
let nullUserData = true;
userSchema.forEach((eachProperty) => {
const userProperty = fields[eachProperty];
- let updatedProperty = userProperty;
+ let updatedProperty: unknown = userProperty;
if (isHashRequired && !disableFormat) {
- updatedProperty = ensureApplicableFormat(eachProperty, userProperty);
+ updatedProperty = ensureApplicableFormat(
+ eachProperty,
+ userProperty,
+ workspaceId,
+ destinationId,
+ );
}
- dataElement = getUpdatedDataElement(dataElement, isHashRequired, eachProperty, updatedProperty);
+ dataElement = getUpdatedDataElement(
+ dataElement,
+ isHashRequired,
+ eachProperty,
+ updatedProperty,
+ record.metadata.workspaceId,
+ record.destination.ID,
+ );
if (dataElement[dataElement.length - 1]) {
nullUserData = false;
@@ -58,10 +86,10 @@ const processRecord = (record, userSchema, isHashRequired, disableFormat) => {
});
if (nullUserData) {
- stats.increment('fb_custom_audience_event_having_all_null_field_values_for_a_user', {
- destinationId: record.destination.ID,
- nullFields: userSchema,
- });
+ return {
+ error: `All user properties [${userSchema.join(', ')}] are invalid or null. At least one valid field is required.`,
+ metadata: record.metadata,
+ };
}
return { dataElement, metadata: record.metadata };
@@ -77,32 +105,54 @@ const processRecord = (record, userSchema, isHashRequired, disableFormat) => {
* @returns {Array} - The response events to send.
*/
const processRecordEventArray = async (
- recordChunksArray,
- config,
- destination,
- operation,
- audienceId,
+ recordChunksArray: FbRecordEvent[][],
+ config: RecordPrepareConfig,
+ destination: FbCustomAudienceDestination,
+ operation: string,
+ audienceId: string,
) => {
const { userSchema, isHashRequired, disableFormat, paramsPayload, prepareParams } = config;
- const toSendEvents = [];
- const metadata = [];
+ const toSendEvents: unknown[] = [];
+ const metadata: Metadata[] = [];
+ const invalidEvents: unknown[] = [];
await forEachInBatches(recordChunksArray, async (recordArray) => {
- const data = await mapInBatches(recordArray, async (input) => {
- const { dataElement, metadata: recordMetadata } = processRecord(
+ const data: unknown[][] = [];
+ await forEachInBatches(recordArray, async (input) => {
+ const result = processRecord(
input,
userSchema,
isHashRequired,
disableFormat,
+ input.metadata.workspaceId,
+ destination.ID,
);
- metadata.push(recordMetadata);
- return dataElement;
+ if ('error' in result) {
+ const error = new InstrumentationError(result.error);
+ const errorObj = generateErrorObject(error);
+ invalidEvents.push(
+ getErrorRespEvents(
+ [result.metadata],
+ errorObj.status,
+ errorObj.message,
+ errorObj.statTags,
+ ),
+ );
+ } else {
+ data.push(result.dataElement!);
+ metadata.push(result.metadata);
+ }
});
+ if (data.length === 0) {
+ return;
+ }
+
const prepareFinalPayload = lodash.cloneDeep(paramsPayload);
prepareFinalPayload.schema = userSchema;
prepareFinalPayload.data = data;
- const payloadBatches = batchingWithPayloadSize(prepareFinalPayload);
+ const workspaceId = recordChunksArray[0]?.[0]?.metadata?.workspaceId;
+ const payloadBatches = batchingWithPayloadSize(prepareFinalPayload, workspaceId);
payloadBatches.forEach((payloadBatch) => {
const response = {
@@ -110,7 +160,7 @@ const processRecordEventArray = async (
payload: payloadBatch,
};
- const wrappedResponse = {
+ const wrappedResponse: WrappedResponse = {
responseField: response,
operationCategory: operation,
};
@@ -120,7 +170,12 @@ const processRecordEventArray = async (
});
});
- return getSuccessRespEvents(toSendEvents, metadata, destination, true);
+ const successResponse =
+ toSendEvents.length > 0
+ ? getSuccessRespEvents(toSendEvents, metadata, destination, true)
+ : null;
+
+ return { successResponse, invalidEvents };
};
/**
@@ -129,7 +184,19 @@ const processRecordEventArray = async (
* @param {Object} config - The configuration object.
* @returns {Array} - The final response payload.
*/
-async function preparePayload(events, config) {
+async function preparePayload(
+ events: FbRecordEvent[],
+ config: {
+ audienceId: string | null | undefined;
+ userSchema: string[];
+ isRaw?: boolean;
+ type?: string;
+ subType?: string;
+ isHashRequired: boolean;
+ disableFormat?: boolean;
+ isValueBasedAudience?: boolean;
+ },
+) {
const {
audienceId,
userSchema,
@@ -142,14 +209,14 @@ async function preparePayload(events, config) {
} = config;
const { destination } = events[0];
const { accessToken, appSecret } = destination.Config;
- const prepareParams = {
+ const prepareParams: PrepareParams = {
access_token: accessToken,
};
if (isDefinedAndNotNullAndNotEmpty(appSecret)) {
const dateNow = Date.now();
prepareParams.appsecret_time = Math.floor(dateNow / 1000); // Get current Unix time in seconds
- prepareParams.appsecret_proof = generateAppSecretProof(accessToken, appSecret, dateNow);
+ prepareParams.appsecret_proof = generateAppSecretProof(accessToken, appSecret!, dateNow);
}
const cleanUserSchema = userSchema.map((field) => field.trim());
@@ -166,7 +233,7 @@ async function preparePayload(events, config) {
);
}
- const paramsPayload = {};
+ const paramsPayload: FbCustomAudiencePayload = {};
if (isRaw) {
paramsPayload.is_raw = isRaw;
@@ -178,10 +245,10 @@ async function preparePayload(events, config) {
}
const groupedRecordsByAction = await groupByInBatches(events, (record) =>
- record.message.action?.toLowerCase(),
+ (record.message.action ?? '').toLowerCase(),
);
- const processAction = async (action, operation) => {
+ const processAction = async (action: string, operation: string) => {
if (groupedRecordsByAction[action]) {
if (
isValueBasedAudience &&
@@ -207,7 +274,7 @@ async function preparePayload(events, config) {
},
destination,
operation,
- audienceId,
+ audienceId!,
);
}
return null;
@@ -217,14 +284,20 @@ async function preparePayload(events, config) {
const insertResponse = await processAction('insert', 'add');
const updateResponse = await processAction('update', 'add');
- const errorResponse = getErrorResponse(groupedRecordsByAction);
+ const errorResponse = [
+ ...getErrorResponse(groupedRecordsByAction),
+ ...(deleteResponse?.invalidEvents || []),
+ ...(insertResponse?.invalidEvents || []),
+ ...(updateResponse?.invalidEvents || []),
+ ];
const finalResponse = createFinalResponse(
- deleteResponse,
- insertResponse,
- updateResponse,
+ deleteResponse?.successResponse,
+ insertResponse?.successResponse,
+ updateResponse?.successResponse,
errorResponse,
);
+
if (finalResponse.length === 0) {
throw new InstrumentationError(
'Missing valid parameters, unable to generate transformed payload',
@@ -238,14 +311,14 @@ async function preparePayload(events, config) {
* @param {Array} groupedRecordInputs - The grouped record inputs.
* @returns {Array} - The processed payload.
*/
-async function processRecordInputsV1(groupedRecordInputs) {
+async function processRecordInputsV1(groupedRecordInputs: FbRecordEvent[]) {
const { destination } = groupedRecordInputs[0];
const { message } = groupedRecordInputs[0];
const { isHashRequired, disableFormat, type, subType, isRaw, audienceId, userSchema } =
destination.Config;
- let operationAudienceId = audienceId;
- let updatedUserSchema = userSchema;
+ let operationAudienceId: string | null = audienceId;
+ let updatedUserSchema = userSchema as string[];
if (isEventSentByVDMV1Flow(groupedRecordInputs[0])) {
const { objectType } = getDestinationExternalIDInfoForRetl(message, 'FB_CUSTOM_AUDIENCE');
operationAudienceId = objectType;
@@ -268,16 +341,16 @@ async function processRecordInputsV1(groupedRecordInputs) {
* @param {Array} groupedRecordInputs - The grouped record inputs.
* @returns {Array} - The processed payload.
*/
-const processRecordInputsV2 = async (groupedRecordInputs) => {
+const processRecordInputsV2 = async (groupedRecordInputs: FbRecordEvent[]) => {
const { connection, message } = groupedRecordInputs[0];
const { isHashRequired, disableFormat, type, subType, isRaw, audienceId, isValueBasedAudience } =
- connection.config.destination;
+ connection!.config.destination;
const identifiers = message?.identifiers;
- let userSchema;
+ let userSchema: string[] | undefined;
if (identifiers) {
userSchema = Object.keys(identifiers);
}
- const events = groupedRecordInputs.map((record) => ({
+ const events: FbRecordEvent[] = groupedRecordInputs.map((record) => ({
...record,
message: {
...record.message,
@@ -286,7 +359,7 @@ const processRecordInputsV2 = async (groupedRecordInputs) => {
}));
return preparePayload(events, {
audienceId,
- userSchema,
+ userSchema: userSchema!,
isRaw,
type,
subType,
@@ -301,7 +374,7 @@ const processRecordInputsV2 = async (groupedRecordInputs) => {
* @param {Array} groupedRecordInputs - The grouped record inputs.
* @returns {Array} - The processed payload.
*/
-async function processRecordInputs(groupedRecordInputs) {
+async function processRecordInputs(groupedRecordInputs: FbRecordEvent[]) {
const event = groupedRecordInputs[0];
// First check for rETL flow and second check for ES flow
if (isEventSentByVDMV1Flow(event) || !isEventSentByVDMV2Flow(event)) {
@@ -310,6 +383,4 @@ async function processRecordInputs(groupedRecordInputs) {
return processRecordInputsV2(groupedRecordInputs);
}
-module.exports = {
- processRecordInputs,
-};
+export { processRecordInputs };
diff --git a/src/v0/destinations/fb_custom_audience/transform.js b/src/v0/destinations/fb_custom_audience/transform.ts
similarity index 65%
rename from src/v0/destinations/fb_custom_audience/transform.js
rename to src/v0/destinations/fb_custom_audience/transform.ts
index 515e1ec8342..e00c9aa0d70 100644
--- a/src/v0/destinations/fb_custom_audience/transform.js
+++ b/src/v0/destinations/fb_custom_audience/transform.ts
@@ -1,30 +1,42 @@
-const lodash = require('lodash');
-const {
+import lodash from 'lodash';
+import {
InstrumentationError,
ConfigurationError,
groupByInBatches,
-} = require('@rudderstack/integrations-lib');
-const {
+} from '@rudderstack/integrations-lib';
+import {
checkSubsetOfArray,
isDefinedAndNotNullAndNotEmpty,
returnArrayOfSubarrays,
flattenMap,
simpleProcessRouterDest,
-} = require('../../util');
-const {
+ getValueFromMessage,
+} from '../../util';
+import {
prepareDataField,
batchingWithPayloadSize,
generateAppSecretProof,
responseBuilderSimple,
getDataSource,
-} = require('./util');
-const { schemaFields, USER_ADD, USER_DELETE, MAX_USER_COUNT } = require('./config');
+} from './util';
+import { schemaFields, USER_ADD, USER_DELETE, MAX_USER_COUNT } from './config';
+import { processRecordInputs } from './recordTransform';
+import logger from '../../../logger';
+import type {
+ FbCustomAudienceDestination,
+ FbCustomAudiencePayload,
+ FbCustomAudienceRequestParams,
+ WrappedResponse,
+ FbRecordEvent,
+ PrepareParams,
+} from './types';
+import type { RudderMessage } from '../../../types';
-const { processRecordInputs } = require('./recordTransform');
-const logger = require('../../../logger');
-
-function checkForUnsupportedEventTypes(dictionary, keyList) {
- const unsupportedEventTypes = [];
+function checkForUnsupportedEventTypes(
+  dictionary: Record<string, unknown>,
+ keyList: string[],
+): string[] {
+ const unsupportedEventTypes: string[] = [];
// eslint-disable-next-line no-restricted-syntax
for (const key in dictionary) {
if (!keyList.includes(key)) {
@@ -36,13 +48,14 @@ function checkForUnsupportedEventTypes(dictionary, keyList) {
// Function responsible prepare the payload field of every event parameter
const preparePayload = (
- userUpdateList,
- userSchema,
- paramsPayload,
- isHashRequired,
- disableFormat,
- destinationId,
-) => {
+  userUpdateList: Record<string, unknown>[],
+ userSchema: string | string[],
+ paramsPayload: FbCustomAudiencePayload,
+ isHashRequired: boolean,
+ disableFormat: boolean,
+ destinationId: string,
+ workspaceId: string,
+): FbCustomAudiencePayload[] => {
const prepareFinalPayload = lodash.cloneDeep(paramsPayload);
if (Array.isArray(userSchema)) {
prepareFinalPayload.schema = userSchema;
@@ -51,36 +64,38 @@ const preparePayload = (
}
prepareFinalPayload.data = prepareDataField(
- userSchema,
+ userSchema as string[],
userUpdateList,
isHashRequired,
disableFormat,
destinationId,
+ workspaceId,
);
- return batchingWithPayloadSize(prepareFinalPayload);
+ return batchingWithPayloadSize(prepareFinalPayload, workspaceId);
};
// Function responsible for building the parameters for each event calls
const prepareResponse = (
- message,
- destination,
- allowedAudienceArray,
- userSchema,
+ message: RudderMessage,
+ destination: FbCustomAudienceDestination,
+  allowedAudienceArray: Record<string, unknown>[],
+ userSchema: string[],
+ workspaceId: string,
isHashRequired = true,
-) => {
+): FbCustomAudienceRequestParams[] => {
const { accessToken, disableFormat, type, subType, isRaw, appSecret } = destination.Config;
- const prepareParams = {};
+ const prepareParams: PrepareParams = {
+ access_token: accessToken,
+ };
// creating the parameters field
- const paramsPayload = {};
-
- prepareParams.access_token = accessToken;
+ const paramsPayload: FbCustomAudiencePayload = {};
if (isDefinedAndNotNullAndNotEmpty(appSecret)) {
const dateNow = Date.now();
prepareParams.appsecret_time = Math.floor(dateNow / 1000); // Get current Unix time in seconds
- prepareParams.appsecret_proof = generateAppSecretProof(accessToken, appSecret, dateNow);
+ prepareParams.appsecret_proof = generateAppSecretProof(accessToken, appSecret!, dateNow);
}
// creating the payload field for parameters
@@ -98,13 +113,14 @@ const prepareResponse = (
userSchema,
paramsPayload,
isHashRequired,
- disableFormat,
+ disableFormat!,
destination.ID,
+ workspaceId,
);
- const respList = [];
+ const respList: FbCustomAudienceRequestParams[] = [];
payloadBatches.forEach((payloadBatch) => {
- const response = {
+ const response: FbCustomAudienceRequestParams = {
...prepareParams,
payload: payloadBatch,
};
@@ -120,24 +136,26 @@ const prepareResponse = (
* @returns
*/
const prepareToSendEvents = (
- message,
- destination,
- audienceChunksArray,
- userSchema,
- isHashRequired,
- operation,
-) => {
- const toSendEvents = [];
+ message: RudderMessage,
+ destination: FbCustomAudienceDestination,
+  audienceChunksArray: Record<string, unknown>[][],
+ userSchema: string[],
+ isHashRequired: boolean,
+ operation: string,
+ workspaceId: string,
+): WrappedResponse[] => {
+ const toSendEvents: WrappedResponse[] = [];
audienceChunksArray.forEach((allowedAudienceArray) => {
const responseArray = prepareResponse(
message,
destination,
allowedAudienceArray,
userSchema,
+ workspaceId,
isHashRequired,
);
responseArray.forEach((response) => {
- const wrappedResponse = {
+ const wrappedResponse: WrappedResponse = {
responseField: response,
operationCategory: operation,
};
@@ -146,9 +164,14 @@ const prepareToSendEvents = (
});
return toSendEvents;
};
-const processEvent = (message, destination) => {
- const respList = [];
- let toSendEvents = [];
+
+const processEvent = (
+ message: RudderMessage,
+ destination: FbCustomAudienceDestination,
+ workspaceId: string,
+) => {
+ const respList: unknown[] = [];
+ let toSendEvents: WrappedResponse[] = [];
let { userSchema } = destination.Config;
const { isHashRequired, audienceId } = destination.Config;
if (!message.type) {
@@ -172,7 +195,8 @@ const processEvent = (message, destination) => {
if (!checkSubsetOfArray(schemaFields, userSchema)) {
throw new ConfigurationError('One or more of the schema fields are not supported');
}
- const { listData } = message.properties;
+ const properties = getValueFromMessage(message, 'properties');
+ const listData = properties?.listData;
// when "remove" is present in the payload
if (isDefinedAndNotNullAndNotEmpty(listData[USER_DELETE])) {
@@ -184,6 +208,7 @@ const processEvent = (message, destination) => {
userSchema,
isHashRequired,
USER_DELETE,
+ workspaceId,
);
}
@@ -198,6 +223,7 @@ const processEvent = (message, destination) => {
userSchema,
isHashRequired,
USER_ADD,
+ workspaceId,
),
);
}
@@ -214,15 +240,19 @@ const processEvent = (message, destination) => {
return respList;
};
-const process = (event) => processEvent(event.message, event.destination);
+const process = (event: {
+ message: RudderMessage;
+ destination: FbCustomAudienceDestination;
+ metadata: { workspaceId: string };
+}) => processEvent(event.message, event.destination, event.metadata?.workspaceId as string);
-const processRouterDest = async (inputs, reqMetadata) => {
- const respList = [];
+const processRouterDest = async (inputs: FbRecordEvent[], reqMetadata: unknown) => {
+ const respList: unknown[] = [];
const groupedInputs = await groupByInBatches(inputs, (input) =>
- input.message.type?.toLowerCase(),
+ (input.message.type ?? '').toLowerCase(),
);
- let transformedRecordEvent = [];
- let transformedAudienceEvent = [];
+ let transformedRecordEvent: unknown[] = [];
+ let transformedAudienceEvent: unknown[] = [];
const eventTypes = ['record', 'audiencelist'];
const unsupportedEventList = checkForUnsupportedEventTypes(groupedInputs, eventTypes);
@@ -240,6 +270,7 @@ const processRouterDest = async (inputs, reqMetadata) => {
groupedInputs.audiencelist,
process,
reqMetadata,
+ undefined,
);
}
@@ -247,4 +278,4 @@ const processRouterDest = async (inputs, reqMetadata) => {
return flattenMap(respList);
};
-module.exports = { process, processRouterDest };
+export { process, processRouterDest };
diff --git a/src/v0/destinations/fb_custom_audience/types.ts b/src/v0/destinations/fb_custom_audience/types.ts
new file mode 100644
index 00000000000..a6342acdcf7
--- /dev/null
+++ b/src/v0/destinations/fb_custom_audience/types.ts
@@ -0,0 +1,79 @@
+import type {
+ Destination,
+ Connection,
+ DestinationConnectionConfig,
+ Metadata,
+} from '../../../types';
+
+interface FbCustomAudienceDestinationConfig {
+ accessToken: string;
+ audienceId: string;
+ userSchema: string | string[];
+ isHashRequired: boolean;
+ disableFormat?: boolean;
+ type?: string;
+ subType?: string;
+ isRaw?: boolean;
+ appSecret?: string;
+ isValueBasedAudience?: boolean;
+}
+
+export type FbCustomAudienceDestination = Destination<FbCustomAudienceDestinationConfig>;
+
+type FbCustomAudienceV2Connection = Connection<
+  DestinationConnectionConfig<FbCustomAudienceDestinationConfig>
+>;
+
+export interface DataSource {
+ type?: string;
+ sub_type?: string;
+}
+
+export interface FbCustomAudiencePayload {
+ schema?: string[];
+ data?: unknown[][];
+ is_raw?: boolean;
+ data_source?: DataSource;
+}
+
+export interface PrepareParams {
+ access_token: string;
+ appsecret_time?: number;
+ appsecret_proof?: string;
+}
+
+export interface FbCustomAudienceRequestParams extends PrepareParams {
+ payload: FbCustomAudiencePayload;
+}
+
+export interface WrappedResponse {
+ responseField: FbCustomAudienceRequestParams;
+ operationCategory: string;
+}
+
+export interface RecordPrepareConfig {
+ userSchema: string[];
+ isHashRequired: boolean;
+ disableFormat?: boolean;
+ paramsPayload: FbCustomAudiencePayload;
+ prepareParams: PrepareParams;
+}
+
+export interface FbRecordMessage {
+ type?: string;
+ action?: string;
+  fields?: Record<string, unknown>;
+  identifiers?: Record<string, unknown>;
+ context?: {
+ destinationFields?: {
+ mappedSchema: string[] | string;
+ };
+ };
+}
+
+export interface FbRecordEvent {
+ destination: FbCustomAudienceDestination;
+ message: FbRecordMessage;
+ metadata: Metadata;
+ connection?: FbCustomAudienceV2Connection;
+}
diff --git a/src/v0/destinations/fb_custom_audience/util.test.js b/src/v0/destinations/fb_custom_audience/util.test.js
deleted file mode 100644
index 693b947945b..00000000000
--- a/src/v0/destinations/fb_custom_audience/util.test.js
+++ /dev/null
@@ -1,206 +0,0 @@
-const { getDataSource, responseBuilderSimple, getUpdatedDataElement } = require('./util');
-const { getEndPoint, ENDPOINT_PATH } = require('./config');
-
-const basePayload = {
- responseField: {
- access_token: 'ABC',
- payload: {
- schema: ['EMAIL', 'FI'],
- data: [
- [
- 'b100c2ec0718fe6b4805b623aeec6710719d042ceea55f5c8135b010ec1c7b36',
- '1e14a2f476f7611a8b22bc85d14237fdc88aac828737e739416c32c5bce3bd16',
- ],
- ],
- },
- },
-};
-
-const baseResponse = {
- version: '1',
- type: 'REST',
- endpoint: getEndPoint('23848494844100489'),
- endpointPath: ENDPOINT_PATH,
- headers: {},
- params: {
- access_token: 'ABC',
- payload: {
- schema: ['EMAIL', 'FI'],
- data: [
- [
- 'b100c2ec0718fe6b4805b623aeec6710719d042ceea55f5c8135b010ec1c7b36',
- '1e14a2f476f7611a8b22bc85d14237fdc88aac828737e739416c32c5bce3bd16',
- ],
- ],
- },
- },
- body: {
- JSON: {},
- JSON_ARRAY: {},
- XML: {},
- FORM: {},
- },
- files: {},
-};
-
-describe('FB_custom_audience utils test', () => {
- describe('getDataSource function tests', () => {
- it('Should return empty datasource if type and subType are both NA', () => {
- const expectedDataSource = {};
- const dataSource = getDataSource('NA', 'NA');
- expect(dataSource).toEqual(expectedDataSource);
- });
- it('Should set subType and type if value present in destination config macthes with preset list', () => {
- const expectedDataSource = {
- type: 'EVENT_BASED',
- };
- const dataSource = getDataSource('EVENT_BASED', 'something');
- expect(dataSource).toEqual(expectedDataSource);
- });
- });
-
- describe('responseBuilderSimple function tests', () => {
- it('Should return correct response for add payload', () => {
- const payload = basePayload;
- payload.operationCategory = 'add';
- const expectedResponse = baseResponse;
- expectedResponse.method = 'POST';
- const response = responseBuilderSimple(payload, '23848494844100489');
- expect(response).toEqual(expectedResponse);
- });
-
- it('Should return correct response for delete payload', () => {
- const payload = basePayload;
- payload.operationCategory = 'remove';
- const expectedResponse = baseResponse;
- expectedResponse.method = 'DELETE';
- const response = responseBuilderSimple(payload, '23848494844100489');
- expect(response).toEqual(expectedResponse);
- });
-
- it('Should throw error if payload is empty', () => {
- try {
- const response = responseBuilderSimple(payload, '');
- expect(response).toEqual();
- } catch (error) {
- expect(error.message).toEqual(`payload is not defined`);
- }
- });
- });
-
- describe('getUpdatedDataElement function tests', () => {
- const testCases = [
- {
- name: 'Should hash field if isHashRequired is set to true',
- initialData: [],
- isHashRequired: true,
- field: 'FN',
- value: 'some-name',
- expected: ['59107c750fd5ee2758d1988f2bf12d9f110439221ebdb7997e70d6a2c1c5afda'],
- },
- {
- name: 'Should not hash field if isHashRequired is set to false',
- initialData: [],
- isHashRequired: false,
- field: 'FN',
- value: 'some-name',
- expected: ['some-name'],
- },
- {
- name: 'Should not hash MADID and just pass value',
- initialData: [],
- isHashRequired: true,
- field: 'MADID',
- value: 'some-id',
- expected: ['some-id'],
- },
- {
- name: 'Should not hash EXTERN_ID and just pass value',
- initialData: ['some-id'],
- isHashRequired: true,
- field: 'EXTERN_ID',
- value: 'some-ext-id',
- expected: ['some-id', 'some-ext-id'],
- },
- {
- name: 'Should not hash MADID and just pass empty value if value does not exist',
- initialData: [],
- isHashRequired: true,
- field: 'MADID',
- value: '',
- expected: [''],
- },
- {
- name: 'Should not hash EXTERN_ID and just pass empty value if value does not exist',
- initialData: [''],
- isHashRequired: true,
- field: 'EXTERN_ID',
- value: '',
- expected: ['', ''],
- },
- {
- name: 'Should correctly parse LOOKALIKE_VALUE to given string number value',
- initialData: [],
- isHashRequired: true,
- field: 'LOOKALIKE_VALUE',
- value: '5',
- expected: [5],
- },
- {
- name: 'Should default LOOKALIKE_VALUE to 0 when value is negative',
- initialData: [],
- isHashRequired: true,
- field: 'LOOKALIKE_VALUE',
- value: '-5',
- expected: [0],
- },
- {
- name: 'Should default LOOKALIKE_VALUE to 0 when value is NaN',
- initialData: [],
- isHashRequired: true,
- field: 'LOOKALIKE_VALUE',
- value: 'not-a-number',
- expected: [0],
- },
- {
- name: 'Should default LOOKALIKE_VALUE to 0 when value is Infinity',
- initialData: [],
- isHashRequired: true,
- field: 'LOOKALIKE_VALUE',
- value: Infinity,
- expected: [0],
- },
- {
- name: 'Should default LOOKALIKE_VALUE to 0 when value is null',
- initialData: [],
- isHashRequired: true,
- field: 'LOOKALIKE_VALUE',
- value: null,
- expected: [0],
- },
- {
- name: 'Should default LOOKALIKE_VALUE to 0 when value is undefined',
- initialData: [],
- isHashRequired: true,
- field: 'LOOKALIKE_VALUE',
- value: undefined,
- expected: [0],
- },
- {
- name: 'Should correctly parse small float LOOKALIKE_VALUE',
- initialData: [],
- isHashRequired: false,
- field: 'LOOKALIKE_VALUE',
- value: '0.0001',
- expected: [0.0001],
- },
- ];
-
- testCases.forEach(({ name, initialData, isHashRequired, field, value, expected }) => {
- it(name, () => {
- const result = getUpdatedDataElement([...initialData], isHashRequired, field, value);
- expect(result).toEqual(expected);
- });
- });
- });
-});
diff --git a/src/v0/destinations/fb_custom_audience/util.test.ts b/src/v0/destinations/fb_custom_audience/util.test.ts
new file mode 100644
index 00000000000..d7a8423b600
--- /dev/null
+++ b/src/v0/destinations/fb_custom_audience/util.test.ts
@@ -0,0 +1,465 @@
+import {
+ getDataSource,
+ responseBuilderSimple,
+ getUpdatedDataElement,
+ ensureApplicableFormat,
+} from './util';
+import { getEndPoint, ENDPOINT_PATH } from './config';
+jest.mock('../../../util/stats', () => ({
+ increment: jest.fn(),
+}));
+
+import stats from '../../../util/stats';
+import type { WrappedResponse } from './types';
+const basePayload = {
+ responseField: {
+ access_token: 'ABC',
+ payload: {
+ schema: ['EMAIL', 'FI'],
+ data: [
+ [
+ 'b100c2ec0718fe6b4805b623aeec6710719d042ceea55f5c8135b010ec1c7b36',
+ '1e14a2f476f7611a8b22bc85d14237fdc88aac828737e739416c32c5bce3bd16',
+ ],
+ ],
+ },
+ },
+ operationCategory: '',
+};
+
+const baseResponse = {
+ version: '1',
+ type: 'REST',
+ endpoint: getEndPoint('23848494844100489'),
+ endpointPath: ENDPOINT_PATH,
+ headers: {},
+ params: {
+ access_token: 'ABC',
+ payload: {
+ schema: ['EMAIL', 'FI'],
+ data: [
+ [
+ 'b100c2ec0718fe6b4805b623aeec6710719d042ceea55f5c8135b010ec1c7b36',
+ '1e14a2f476f7611a8b22bc85d14237fdc88aac828737e739416c32c5bce3bd16',
+ ],
+ ],
+ },
+ },
+ body: {
+ JSON: {},
+ JSON_ARRAY: {},
+ XML: {},
+ FORM: {},
+ },
+ files: {},
+ method: '',
+};
+
+const TEST_WORKSPACE_ID = 'ws-1';
+const TEST_DESTINATION_ID = 'dest-1';
+
+describe('ensureApplicableFormat', () => {
+ describe('PHONE', () => {
+ const cases = [
+ { input: '+1 (650) 555-1212', expected: '16505551212' },
+ { input: '00919876543210', expected: '919876543210' },
+ { input: '+44 20 7946 0958', expected: '442079460958' },
+ ];
+ cases.forEach(({ input, expected }) => {
+ it(`"${input}" → "${expected}"`, () => {
+ expect(ensureApplicableFormat('PHONE', input, TEST_WORKSPACE_ID, TEST_DESTINATION_ID)).toBe(
+ expected,
+ );
+ });
+ });
+ });
+
+ describe('FN / LN — lowercase, remove ASCII punctuation, preserve spaces and UTF-8', () => {
+ const cases = [
+ { input: 'Mary', expected: 'mary' },
+ { input: 'Valéry', expected: 'valéry' },
+ { input: '정', expected: '정' },
+ { input: "O'Brien", expected: 'obrien' },
+ { input: 'John Smith1', expected: 'john smith1' },
+ { input: 'Mary-Jane', expected: 'maryjane' },
+ ];
+ (['FN', 'LN'] as const).forEach((field) => {
+ cases.forEach(({ input, expected }) => {
+ it(`${field}: "${input}" → "${expected}"`, () => {
+ expect(ensureApplicableFormat(field, input, TEST_WORKSPACE_ID, TEST_DESTINATION_ID)).toBe(
+ expected,
+ );
+ });
+ });
+ });
+ });
+
+ describe('COUNTRY — lowercase, must be exactly two alpha characters', () => {
+ const validCases = [
+ { input: 'US', expected: 'us' },
+ { input: 'in', expected: 'in' },
+ { input: 'GB', expected: 'gb' },
+ ];
+ validCases.forEach(({ input, expected }) => {
+ it(`valid: "${input}" → "${expected}"`, () => {
+ expect(
+ ensureApplicableFormat('COUNTRY', input, TEST_WORKSPACE_ID, TEST_DESTINATION_ID),
+ ).toBe(expected);
+ });
+ });
+
+ const invalidCases = [
+ { input: 'USA', description: 'three letters' },
+ { input: 'U', description: 'single letter' },
+ { input: 'U1', description: 'contains digit' },
+ { input: '12', description: 'all digits' },
+ { input: '', description: 'empty string' },
+ ];
+ invalidCases.forEach(({ input, description }) => {
+ it(`invalid (${description}): "${input}" → passes through when reject disabled`, () => {
+ const result = ensureApplicableFormat(
+ 'COUNTRY',
+ input,
+ TEST_WORKSPACE_ID,
+ TEST_DESTINATION_ID,
+ );
+ expect(result).toBe(input.toLowerCase());
+ });
+ });
+
+ it('invalid country code → increments stats counter and returns empty string when reject enabled', () => {
+ const mockStatsIncrement = stats.increment as jest.Mock;
+ mockStatsIncrement.mockClear();
+ process.env.FB_CUSTOM_AUDIENCE_REJECT_INVALID_FIELDS = 'true';
+ try {
+ const result = ensureApplicableFormat(
+ 'COUNTRY',
+ 'USA',
+ TEST_WORKSPACE_ID,
+ TEST_DESTINATION_ID,
+ );
+ expect(result).toBe('');
+ expect(mockStatsIncrement).toHaveBeenCalledWith('fb_custom_audience_invalid_country_code', {
+ workspaceId: TEST_WORKSPACE_ID,
+ destinationId: TEST_DESTINATION_ID,
+ });
+ } finally {
+ delete process.env.FB_CUSTOM_AUDIENCE_REJECT_INVALID_FIELDS;
+ }
+ });
+ });
+
+ describe('ZIP — remove spaces and dashes, lowercase', () => {
+ const cases = [
+ { input: '94035-1234', expected: '940351234' },
+ { input: 'M1 1AE', expected: 'm11ae' },
+ { input: '75018', expected: '75018' },
+ { input: ' K1A 0A6 ', expected: 'k1a0a6' },
+ ];
+ cases.forEach(({ input, expected }) => {
+ it(`"${input}" → "${expected}"`, () => {
+ expect(ensureApplicableFormat('ZIP', input, TEST_WORKSPACE_ID, TEST_DESTINATION_ID)).toBe(
+ expected,
+ );
+ });
+ });
+ });
+});
+
+describe('FB_custom_audience utils test', () => {
+ describe('getDataSource function tests', () => {
+ it('Should return empty datasource if type and subType are both NA', () => {
+ const expectedDataSource = {};
+ const dataSource = getDataSource('NA', 'NA');
+ expect(dataSource).toEqual(expectedDataSource);
+ });
+    it('Should set subType and type if value present in destination config matches with preset list', () => {
+ const expectedDataSource = {
+ type: 'EVENT_BASED',
+ };
+ const dataSource = getDataSource('EVENT_BASED', 'something');
+ expect(dataSource).toEqual(expectedDataSource);
+ });
+ });
+
+ describe('responseBuilderSimple function tests', () => {
+ it('Should return correct response for add payload', () => {
+ const payload = basePayload;
+ payload.operationCategory = 'add';
+ const expectedResponse = baseResponse;
+ expectedResponse.method = 'POST';
+ const response = responseBuilderSimple(payload, '23848494844100489');
+ expect(response).toEqual(expectedResponse);
+ });
+
+ it('Should return correct response for delete payload', () => {
+ const payload = basePayload;
+ payload.operationCategory = 'remove';
+ const expectedResponse = baseResponse;
+ expectedResponse.method = 'DELETE';
+ const response = responseBuilderSimple(payload, '23848494844100489');
+ expect(response).toEqual(expectedResponse);
+ });
+
+ it('Should throw error if payload is empty', () => {
+ let emptyPayload: WrappedResponse | undefined;
+ expect.assertions(1);
+ try {
+ responseBuilderSimple(emptyPayload, '');
+ } catch (error: any) {
+ expect(error.message).toEqual('Payload could not be constructed');
+ }
+ });
+ });
+
+ describe('getUpdatedDataElement function tests', () => {
+ const testCases = [
+ {
+ name: 'Should hash field if isHashRequired is set to true',
+ initialData: [],
+ isHashRequired: true,
+ field: 'FN',
+ value: 'some-name',
+ expected: ['59107c750fd5ee2758d1988f2bf12d9f110439221ebdb7997e70d6a2c1c5afda'],
+ },
+ {
+ name: 'Should not hash field if isHashRequired is set to false',
+ initialData: [],
+ isHashRequired: false,
+ field: 'FN',
+ value: 'some-name',
+ expected: ['some-name'],
+ },
+ {
+ name: 'Should not hash MADID and just pass value',
+ initialData: [],
+ isHashRequired: true,
+ field: 'MADID',
+ value: 'some-id',
+ expected: ['some-id'],
+ },
+ {
+ name: 'Should not hash EXTERN_ID and just pass value',
+ initialData: ['some-id'],
+ isHashRequired: true,
+ field: 'EXTERN_ID',
+ value: 'some-ext-id',
+ expected: ['some-id', 'some-ext-id'],
+ },
+ {
+ name: 'Should not hash MADID and just pass empty value if value does not exist',
+ initialData: [],
+ isHashRequired: true,
+ field: 'MADID',
+ value: '',
+ expected: [''],
+ },
+ {
+ name: 'Should not hash EXTERN_ID and just pass empty value if value does not exist',
+ initialData: [''],
+ isHashRequired: true,
+ field: 'EXTERN_ID',
+ value: '',
+ expected: ['', ''],
+ },
+ {
+ name: 'Should correctly parse LOOKALIKE_VALUE to given string number value',
+ initialData: [],
+ isHashRequired: true,
+ field: 'LOOKALIKE_VALUE',
+ value: '5',
+ expected: [5],
+ },
+ {
+ name: 'Should default LOOKALIKE_VALUE to 0 when value is negative',
+ initialData: [],
+ isHashRequired: true,
+ field: 'LOOKALIKE_VALUE',
+ value: '-5',
+ expected: [0],
+ },
+ {
+ name: 'Should default LOOKALIKE_VALUE to 0 when value is NaN',
+ initialData: [],
+ isHashRequired: true,
+ field: 'LOOKALIKE_VALUE',
+ value: 'not-a-number',
+ expected: [0],
+ },
+ {
+ name: 'Should default LOOKALIKE_VALUE to 0 when value is Infinity',
+ initialData: [],
+ isHashRequired: true,
+ field: 'LOOKALIKE_VALUE',
+ value: Infinity,
+ expected: [0],
+ },
+ {
+ name: 'Should default LOOKALIKE_VALUE to 0 when value is null',
+ initialData: [],
+ isHashRequired: true,
+ field: 'LOOKALIKE_VALUE',
+ value: null,
+ expected: [0],
+ },
+ {
+ name: 'Should default LOOKALIKE_VALUE to 0 when value is undefined',
+ initialData: [],
+ isHashRequired: true,
+ field: 'LOOKALIKE_VALUE',
+ value: undefined,
+ expected: [0],
+ },
+ {
+ name: 'Should correctly parse small float LOOKALIKE_VALUE',
+ initialData: [],
+ isHashRequired: false,
+ field: 'LOOKALIKE_VALUE',
+ value: '0.0001',
+ expected: [0.0001],
+ },
+ ];
+
+ testCases.forEach(({ name, initialData, isHashRequired, field, value, expected }) => {
+ it(name, () => {
+ const result = getUpdatedDataElement(
+ [...initialData],
+ isHashRequired,
+ field,
+ value,
+ TEST_WORKSPACE_ID,
+ TEST_DESTINATION_ID,
+ );
+ expect(result).toEqual(expected);
+ });
+ });
+
+ describe('validateHashingConsistency function tests', () => {
+ const hashedValue = 'b94d27b9934d3e08a52e52d7da7dabfac484efe04294e576ca48e1cb0d7d6267'; // sha256 of 'test'
+ const plaintextEmail = 'user@example.com';
+ const mockStatsIncrement = stats.increment as jest.Mock;
+
+ beforeEach(() => {
+ mockStatsIncrement.mockClear();
+ });
+
+ afterEach(() => {
+ delete process.env.AUDIENCE_HASHING_VALIDATION_ENABLED;
+ });
+
+ it('Hashing ON + pre-hashed value → emits metric and throws when validation enabled', () => {
+ process.env.AUDIENCE_HASHING_VALIDATION_ENABLED = 'true';
+ expect(() =>
+ getUpdatedDataElement(
+ [],
+ true,
+ 'EMAIL',
+ hashedValue,
+ TEST_WORKSPACE_ID,
+ TEST_DESTINATION_ID,
+ ),
+ ).toThrow(
+ 'Hashing is enabled but the value for field EMAIL appears to already be hashed. Either disable hashing or send unhashed data.',
+ );
+ expect(mockStatsIncrement).toHaveBeenCalledWith('audience_hashing_inconsistency', {
+ propertyName: 'EMAIL',
+ type: 'hashed_when_hash_enabled',
+ workspaceId: 'ws-1',
+ destinationId: 'dest-1',
+ destType: 'fb_custom_audience',
+ });
+ });
+
+ it('Hashing ON + plaintext value → no error, no metric', () => {
+ expect(() =>
+ getUpdatedDataElement(
+ [],
+ true,
+ 'EMAIL',
+ plaintextEmail,
+ TEST_WORKSPACE_ID,
+ TEST_DESTINATION_ID,
+ ),
+ ).not.toThrow();
+ expect(mockStatsIncrement).not.toHaveBeenCalled();
+ });
+
+ it('Hashing OFF + plaintext value → emits metric and throws when validation enabled', () => {
+ process.env.AUDIENCE_HASHING_VALIDATION_ENABLED = 'true';
+ expect(() =>
+ getUpdatedDataElement(
+ [],
+ false,
+ 'EMAIL',
+ plaintextEmail,
+ TEST_WORKSPACE_ID,
+ TEST_DESTINATION_ID,
+ ),
+ ).toThrow(
+ 'Hashing is disabled but the value for field EMAIL appears to be unhashed. Either enable hashing or send pre-hashed data.',
+ );
+ expect(mockStatsIncrement).toHaveBeenCalledWith('audience_hashing_inconsistency', {
+ propertyName: 'EMAIL',
+ type: 'unhashed_when_hash_disabled',
+ workspaceId: 'ws-1',
+ destinationId: 'dest-1',
+ destType: 'fb_custom_audience',
+ });
+ });
+
+ it('Hashing OFF + 64-char hex value → no error, no metric', () => {
+ expect(() =>
+ getUpdatedDataElement(
+ [],
+ false,
+ 'EMAIL',
+ hashedValue,
+ TEST_WORKSPACE_ID,
+ TEST_DESTINATION_ID,
+ ),
+ ).not.toThrow();
+ expect(mockStatsIncrement).not.toHaveBeenCalled();
+ });
+
+ it('Validation disabled (default) + hashing ON + pre-hashed value → emits metric but no throw', () => {
+ expect(() =>
+ getUpdatedDataElement(
+ [],
+ true,
+ 'EMAIL',
+ hashedValue,
+ TEST_WORKSPACE_ID,
+ TEST_DESTINATION_ID,
+ ),
+ ).not.toThrow();
+ expect(mockStatsIncrement).toHaveBeenCalledWith('audience_hashing_inconsistency', {
+ propertyName: 'EMAIL',
+ type: 'hashed_when_hash_enabled',
+ workspaceId: 'ws-1',
+ destinationId: 'dest-1',
+ destType: 'fb_custom_audience',
+ });
+ });
+
+ it('Validation disabled (default) + hashing OFF + plaintext value → emits metric but no throw', () => {
+ expect(() =>
+ getUpdatedDataElement(
+ [],
+ false,
+ 'EMAIL',
+ plaintextEmail,
+ TEST_WORKSPACE_ID,
+ TEST_DESTINATION_ID,
+ ),
+ ).not.toThrow();
+ expect(mockStatsIncrement).toHaveBeenCalledWith('audience_hashing_inconsistency', {
+ propertyName: 'EMAIL',
+ type: 'unhashed_when_hash_disabled',
+ workspaceId: 'ws-1',
+ destinationId: 'dest-1',
+ destType: 'fb_custom_audience',
+ });
+ });
+ });
+ });
+});
diff --git a/src/v0/destinations/fb_custom_audience/util.js b/src/v0/destinations/fb_custom_audience/util.ts
similarity index 53%
rename from src/v0/destinations/fb_custom_audience/util.js
rename to src/v0/destinations/fb_custom_audience/util.ts
index 3455e185717..75dd25c5a6a 100644
--- a/src/v0/destinations/fb_custom_audience/util.js
+++ b/src/v0/destinations/fb_custom_audience/util.ts
@@ -1,23 +1,39 @@
-const lodash = require('lodash');
-const sha256 = require('sha256');
-const crypto = require('crypto');
-const jsonSize = require('json-size');
-const {
+import lodash from 'lodash';
+import sha256 from 'sha256';
+import crypto from 'crypto';
+import jsonSize from 'json-size';
+import validator from 'validator';
+import {
InstrumentationError,
ConfigurationError,
isDefinedAndNotNull,
convertToString,
-} = require('@rudderstack/integrations-lib');
-const { TransformationError } = require('@rudderstack/integrations-lib');
-const { typeFields, subTypeFields, getEndPoint } = require('./config');
-const {
+ TransformationError,
+} from '@rudderstack/integrations-lib';
+import type {
+ DataSource,
+ FbCustomAudiencePayload,
+ FbRecordMessage,
+ WrappedResponse,
+} from './types';
+import {
+ typeFields,
+ subTypeFields,
+ getEndPoint,
+ isRejectInvalidFieldsEnabled,
+ DESTINATION,
+} from './config';
+import {
defaultRequestConfig,
defaultPostRequestConfig,
defaultDeleteRequestConfig,
-} = require('../../util');
-const stats = require('../../../util/stats');
+} from '../../util';
+import stats from '../../../util/stats';
+import * as config from './config';
+import { validateHashingConsistency } from '../../util/audienceUtils';
-const config = require('./config');
+// ISO 3166-1 alpha-2: exactly two lowercase letters
+const COUNTRY_CODE_REGEX = /^[a-z]{2}$/;
/**
* Example payload ={
@@ -36,22 +52,27 @@ const config = require('./config');
]
]
} */
-const batchingWithPayloadSize = (payload) => {
+const batchingWithPayloadSize = (
+ payload: FbCustomAudiencePayload,
+ workspaceId: string,
+): FbCustomAudiencePayload[] => {
+ const maxPayloadSize = config.getMaxPayloadSize(workspaceId);
const payloadSize = jsonSize(payload);
- if (payloadSize > config.maxPayloadSize) {
- const revisedPayloadArray = [];
- const noOfBatches = Math.ceil(payloadSize / config.maxPayloadSize);
- const revisedRecordsPerPayload = Math.floor(payload.data.length / noOfBatches);
- const revisedDataArray = lodash.chunk(payload.data, revisedRecordsPerPayload);
- revisedDataArray.forEach((data) => {
- revisedPayloadArray.push({ ...payload, data });
+ if (payloadSize > maxPayloadSize) {
+ const revisedPayloadArray: FbCustomAudiencePayload[] = [];
+ const noOfBatches = Math.ceil(payloadSize / maxPayloadSize);
+ const data = payload.data!;
+ const revisedRecordsPerPayload = Math.floor(data.length / noOfBatches);
+ const revisedDataArray = lodash.chunk(data, revisedRecordsPerPayload);
+ revisedDataArray.forEach((chunk) => {
+ revisedPayloadArray.push({ ...payload, data: chunk });
});
return revisedPayloadArray;
}
return [payload];
};
-const getSchemaForEventMappedToDest = (message) => {
+const getSchemaForEventMappedToDest = (message: FbRecordMessage): string[] => {
const mappedSchema = message?.context?.destinationFields;
if (!mappedSchema) {
throw new InstrumentationError(
@@ -59,27 +80,45 @@ const getSchemaForEventMappedToDest = (message) => {
);
}
// context.destinationFields has 2 possible values. An Array of fields or Comma seperated string with field names
- let userSchema = Array.isArray(mappedSchema) ? mappedSchema : mappedSchema.split(',');
+ let userSchema = Array.isArray(mappedSchema)
+ ? mappedSchema
+ : (mappedSchema as unknown as string).split(',');
userSchema = userSchema.map((field) => field.trim());
return userSchema;
};
-// function responsible to ensure the user inputs are passed according to the allowed format
-const ensureApplicableFormat = (userProperty, userInformation) => {
- let updatedProperty;
- let userInformationTrimmed;
+/**
+ * Ensures user inputs are in the format required by Facebook Custom Audiences.
+ * Returns empty string for invalid field values.
+ */
+const ensureApplicableFormat = (
+ userProperty: string,
+ userInformation: unknown,
+ workspaceId: string,
+ destinationId: string,
+): unknown => {
+ let updatedProperty: unknown;
+ let userInformationTrimmed: string;
if (isDefinedAndNotNull(userInformation)) {
- const stringifiedUserInformation = convertToString(userInformation);
+ const stringifiedUserInformation = convertToString(userInformation).trim();
+ // https://developers.facebook.com/docs/marketing-api/conversions-api/parameters/customer-information-parameters
switch (userProperty) {
- case 'EMAIL':
- updatedProperty = stringifiedUserInformation.trim().toLowerCase();
+ case 'EMAIL': {
+ const emailValue = stringifiedUserInformation.toLowerCase();
+ if (validator.isEmail(emailValue)) {
+ updatedProperty = emailValue;
+ } else {
+ stats.increment('fb_custom_audience_invalid_email', { workspaceId, destinationId });
+ updatedProperty = isRejectInvalidFieldsEnabled() ? '' : emailValue;
+ }
break;
- case 'PHONE':
- // remove all non-numerical characters
- updatedProperty = stringifiedUserInformation.replace(/\D/g, '');
- // remove all leading zeros
- updatedProperty = updatedProperty.replace(/^0+/g, '');
+ }
+ case 'PHONE': {
+ // remove all non-numerical characters, then remove all leading zeros
+ updatedProperty = stringifiedUserInformation.replace(/\D/g, '').replace(/^0+/g, '');
+ // Note: libphonenumber-js is not used here as it requires a country code to validate, which may not always be present.
break;
+ }
case 'GEN':
updatedProperty =
stringifiedUserInformation.toLowerCase() === 'f' ||
@@ -88,7 +127,7 @@ const ensureApplicableFormat = (userProperty, userInformation) => {
: 'm';
break;
case 'DOBY':
- updatedProperty = stringifiedUserInformation.trim().replace(/\./g, '');
+ updatedProperty = stringifiedUserInformation.replace(/\./g, '');
break;
case 'DOBM':
case 'DOBD':
@@ -101,24 +140,35 @@ const ensureApplicableFormat = (userProperty, userInformation) => {
break;
case 'LN':
case 'FN':
+ // Remove ASCII punctuation (0x21-0x2F, 0x3A-0x40, 0x5B-0x60, 0x7B-0x7E).
+ // Preserves spaces, digits, accented letters, and all non-ASCII (UTF-8) characters.
+ updatedProperty = stringifiedUserInformation
+ .toLowerCase()
+ .replace(/[\x21-\x2F\x3A-\x40\x5B-\x60\x7B-\x7E]/g, '');
+ break;
case 'FI':
- if (userProperty !== 'FI') {
- updatedProperty = stringifiedUserInformation.toLowerCase().replace(/[^#$%&'*+/a-z]/g, '');
- } else {
- updatedProperty = stringifiedUserInformation
- .toLowerCase()
- .replace(/[^!"#$%&'()*+,-./a-z]/g, '');
- }
+ updatedProperty = stringifiedUserInformation
+ .toLowerCase()
+ .replace(/[^!"#$%&'()*+,-./a-z]/g, '');
break;
case 'MADID':
updatedProperty = stringifiedUserInformation.toLowerCase();
break;
- case 'COUNTRY':
- updatedProperty = stringifiedUserInformation.toLowerCase();
+ case 'COUNTRY': {
+ const countryCode = stringifiedUserInformation.toLowerCase();
+ if (COUNTRY_CODE_REGEX.test(countryCode)) {
+ updatedProperty = countryCode;
+ } else {
+ stats.increment('fb_custom_audience_invalid_country_code', {
+ workspaceId,
+ destinationId,
+ });
+ updatedProperty = isRejectInvalidFieldsEnabled() ? '' : countryCode;
+ }
break;
+ }
case 'ZIP':
- userInformationTrimmed = stringifiedUserInformation.replace(/\s/g, '');
- updatedProperty = userInformationTrimmed.toLowerCase();
+ updatedProperty = stringifiedUserInformation.replace(/[\s-]/g, '').toLowerCase();
break;
case 'ST':
case 'CT':
@@ -140,7 +190,20 @@ const ensureApplicableFormat = (userProperty, userInformation) => {
return updatedProperty;
};
-const getUpdatedDataElement = (dataElement, isHashRequired, propertyName, propertyValue) => {
+const getUpdatedDataElement = (
+ dataElement: unknown[],
+ isHashRequired: boolean,
+ propertyName: string,
+ propertyValue: unknown,
+ workspaceId: string,
+ destinationId: string,
+): unknown[] => {
+ const destination = {
+ workspaceId,
+ id: destinationId,
+ type: DESTINATION,
+ config: { isHashRequired },
+ };
// Normalize undefined/null to empty string
const normalizedValue = propertyValue ?? '';
@@ -162,9 +225,14 @@ const getUpdatedDataElement = (dataElement, isHashRequired, propertyName, proper
* Reference: https://developers.facebook.com/docs/marketing-api/audiences/guides/custom-audiences#hash
* Send an empty string for the properties for which the user hasn't provided any value.
*/
- const isHashable = isHashRequired && propertyName !== 'MADID' && propertyName !== 'EXTERN_ID';
+ const isHashableField = propertyName !== 'MADID' && propertyName !== 'EXTERN_ID';
+ const shouldHash = isHashRequired && isHashableField;
- if (isHashable) {
+ if (isHashableField) {
+ validateHashingConsistency(propertyName, String(normalizedValue), destination);
+ }
+
+ if (shouldHash) {
dataElement.push(normalizedValue ? sha256(String(normalizedValue)) : '');
} else {
dataElement.push(normalizedValue);
@@ -176,25 +244,31 @@ const getUpdatedDataElement = (dataElement, isHashRequired, propertyName, proper
// Function responsible for making the data field without payload object
// Based on the "isHashRequired" value hashing is explicitly enabled or disabled
const prepareDataField = (
- userSchema,
- userUpdateList,
- isHashRequired,
- disableFormat,
- destinationId,
-) => {
- const data = [];
+ userSchema: string[],
+ userUpdateList: Record<string, unknown>[],
+ isHashRequired: boolean,
+ disableFormat: boolean,
+ destinationId: string,
+ workspaceId: string,
+): unknown[][] => {
+ const data: unknown[][] = [];
let nullEvent = true; // flag to check for bad events (all user properties are null)
userUpdateList.forEach((eachUser) => {
- let dataElement = [];
+ let dataElement: unknown[] = [];
let nullUserData = true; // flag to check for bad event (all properties are null for a user)
userSchema.forEach((eachProperty) => {
const userProperty = eachUser[eachProperty];
- let updatedProperty = userProperty;
+ let updatedProperty: unknown = userProperty;
if (isHashRequired && !disableFormat) {
- updatedProperty = ensureApplicableFormat(eachProperty, userProperty);
+ updatedProperty = ensureApplicableFormat(
+ eachProperty,
+ userProperty,
+ workspaceId,
+ destinationId,
+ );
}
dataElement = getUpdatedDataElement(
@@ -202,6 +276,8 @@ const prepareDataField = (
isHashRequired,
eachProperty,
updatedProperty,
+ workspaceId,
+ destinationId,
);
if (dataElement[dataElement.length - 1]) {
@@ -211,10 +287,9 @@ const prepareDataField = (
});
if (nullUserData) {
- stats.increment('fb_custom_audience_event_having_all_null_field_values_for_a_user', {
- destinationId,
- nullFields: userSchema,
- });
+ throw new InstrumentationError(
+ `All user properties [${userSchema.join(', ')}] are invalid or null. At least one valid field is required.`,
+ );
}
data.push(dataElement);
@@ -230,7 +305,11 @@ const prepareDataField = (
};
// ref: https://developers.facebook.com/docs/facebook-login/security/#generate-the-proof
-const generateAppSecretProof = (accessToken, appSecret, dateNow) => {
+const generateAppSecretProof = (
+ accessToken: string,
+ appSecret: string,
+ dateNow: number,
+): string => {
const currentTime = Math.floor(dateNow / 1000); // Get current Unix time in seconds
const data = `${accessToken}|${currentTime}`;
@@ -242,8 +321,8 @@ const generateAppSecretProof = (accessToken, appSecret, dateNow) => {
return appsecretProof;
};
-const getDataSource = (type, subType) => {
- const dataSource = {};
+const getDataSource = (type: string | undefined, subType: string | undefined): DataSource => {
+ const dataSource: DataSource = {};
if (type && type !== 'NA' && typeFields.includes(type)) {
dataSource.type = type;
}
@@ -253,7 +332,7 @@ const getDataSource = (type, subType) => {
return dataSource;
};
-const responseBuilderSimple = (payload, audienceId) => {
+const responseBuilderSimple = (payload: WrappedResponse | undefined, audienceId: string) => {
if (payload) {
const responseParams = payload.responseField;
const response = defaultRequestConfig();
@@ -274,7 +353,7 @@ const responseBuilderSimple = (payload, audienceId) => {
throw new TransformationError(`Payload could not be constructed`);
};
-module.exports = {
+export {
prepareDataField,
getSchemaForEventMappedToDest,
batchingWithPayloadSize,
diff --git a/src/v0/destinations/google_adwords_enhanced_conversions/transform.js b/src/v0/destinations/google_adwords_enhanced_conversions/transform.js
index 156eb92d201..78396d9661a 100644
--- a/src/v0/destinations/google_adwords_enhanced_conversions/transform.js
+++ b/src/v0/destinations/google_adwords_enhanced_conversions/transform.js
@@ -14,6 +14,7 @@ const {
const { trackMapping } = require('./config');
const { JSON_MIME_TYPE } = require('../../util/constant');
+const { isCustomAdjustmentTypeSupported } = require('./utils');
/**
* This function is helping to update the mappingJson.
@@ -80,7 +81,7 @@ const processTrackEvent = async (metadata, message, destination) => {
let flag = false;
const { Config } = destination;
const { event } = message;
- const { listOfConversions } = Config;
+ const { listOfConversions, adjustmentType } = Config;
if (listOfConversions.some((i) => i.conversions === event)) {
flag = true;
}
@@ -108,6 +109,16 @@ const processTrackEvent = async (metadata, message, destination) => {
// Removing the null values from userIdentifier
const arr = payload.conversionAdjustments[0].userIdentifiers;
payload.conversionAdjustments[0].userIdentifiers = arr.filter((item) => !!item);
+
+ if (
+ isCustomAdjustmentTypeSupported(metadata?.workspaceId) &&
+ adjustmentType &&
+ adjustmentType === 'RESTATEMENT'
+ ) {
+ payload.conversionAdjustments[0].adjustmentType = adjustmentType;
+ delete payload.conversionAdjustments[0].userIdentifiers;
+ delete payload.conversionAdjustments[0].userAgent;
+ }
return responseBuilder(metadata, message, destination, payload);
};
diff --git a/src/v0/destinations/google_adwords_enhanced_conversions/utils.ts b/src/v0/destinations/google_adwords_enhanced_conversions/utils.ts
new file mode 100644
index 00000000000..aa3650208c8
--- /dev/null
+++ b/src/v0/destinations/google_adwords_enhanced_conversions/utils.ts
@@ -0,0 +1,30 @@
+const { isDefinedAndNotNull } = require('../../util');
+
+let adjustmentTypeSupportedWorkspaceIds: string | Map<string, boolean> = 'NONE';
+if (isDefinedAndNotNull(process.env.DEST_GAEC_ADJUSTMENT_TYPE_SUPPORTED_WORKSPACE_IDS)) {
+ const supportedWorkspaceIds = process.env.DEST_GAEC_ADJUSTMENT_TYPE_SUPPORTED_WORKSPACE_IDS!;
+ switch (supportedWorkspaceIds) {
+ case 'ALL':
+ adjustmentTypeSupportedWorkspaceIds = 'ALL';
+ break;
+ case 'NONE':
+ adjustmentTypeSupportedWorkspaceIds = 'NONE';
+ break;
+ default:
+ adjustmentTypeSupportedWorkspaceIds = new Map<string, boolean>(
+ supportedWorkspaceIds.split(',').map((s) => [s.trim(), true]),
+ );
+ }
+}
+
+const isCustomAdjustmentTypeSupported = (workspaceId: string) => {
+ if (adjustmentTypeSupportedWorkspaceIds === 'ALL') {
+ return true;
+ }
+ if (adjustmentTypeSupportedWorkspaceIds === 'NONE') {
+ return false;
+ }
+ return (adjustmentTypeSupportedWorkspaceIds as Map<string, boolean>).has(workspaceId);
+};
+
+export { isCustomAdjustmentTypeSupported };
diff --git a/src/v0/destinations/google_adwords_offline_conversions/config.js b/src/v0/destinations/google_adwords_offline_conversions/config.js
index ec8608b2877..0e0ecbee4f6 100644
--- a/src/v0/destinations/google_adwords_offline_conversions/config.js
+++ b/src/v0/destinations/google_adwords_offline_conversions/config.js
@@ -1,20 +1,20 @@
const { getMappingConfig } = require('../../util');
-const API_VERSION = 'v19';
+const API_VERSION = 'v22';
const CUSTOMER_ID_PARAM = ':customerId';
const BASE_ENDPOINT = `https://googleads.googleapis.com/${API_VERSION}/customers/${CUSTOMER_ID_PARAM}`;
-// Ref - https://developers.google.com/google-ads/api/rest/reference/rest/v19/customers/uploadClickConversions
+// Ref - https://developers.google.com/google-ads/api/rest/reference/rest/v22/customers/uploadClickConversions
const CLICK_CONVERSION_ENDPOINT_PATH = 'uploadClickConversions';
const CLICK_CONVERSION = `${BASE_ENDPOINT}:${CLICK_CONVERSION_ENDPOINT_PATH}`;
-// Ref - https://developers.google.com/google-ads/api/rest/reference/rest/v19/customers/uploadCallConversions
+// Ref - https://developers.google.com/google-ads/api/rest/reference/rest/v22/customers/uploadCallConversions
const CALL_CONVERSION_ENDPOINT_PATH = 'uploadCallConversions';
const CALL_CONVERSION = `${BASE_ENDPOINT}:${CALL_CONVERSION_ENDPOINT_PATH}`;
-// Ref - https://developers.google.com/google-ads/api/rest/reference/rest/v19/customers.googleAds/searchStream
+// Ref - https://developers.google.com/google-ads/api/rest/reference/rest/v22/customers.googleAds/searchStream
const SEARCH_STREAM_ENDPOINT_PATH = 'searchStream';
const SEARCH_STREAM = `${BASE_ENDPOINT}/googleAds:${SEARCH_STREAM_ENDPOINT_PATH}`;
diff --git a/src/v0/destinations/google_adwords_offline_conversions/utils.test.js b/src/v0/destinations/google_adwords_offline_conversions/utils.test.js
index d6b508224c9..099664ee806 100644
--- a/src/v0/destinations/google_adwords_offline_conversions/utils.test.js
+++ b/src/v0/destinations/google_adwords_offline_conversions/utils.test.js
@@ -7,9 +7,11 @@ const {
getCallConversionPayload,
getAddConversionPayload,
} = require('./utils');
-const { CLICK_CONVERSION_ENDPOINT_PATH, CALL_CONVERSION_ENDPOINT_PATH } = require('./config');
-
-const API_VERSION = 'v19';
+const {
+ CLICK_CONVERSION_ENDPOINT_PATH,
+ CALL_CONVERSION_ENDPOINT_PATH,
+ API_VERSION,
+} = require('./config');
const getTestMessage = () => {
let message = {
@@ -327,7 +329,7 @@ describe('getCallConversionPayload', () => {
});
expect(result).toEqual({
endpointDetails: {
- endpoint: 'https://googleads.googleapis.com/v19/customers/9625812972:uploadCallConversions',
+ endpoint: 'https://googleads.googleapis.com/v22/customers/9625812972:uploadCallConversions',
path: CALL_CONVERSION_ENDPOINT_PATH,
},
payload: {
@@ -359,7 +361,7 @@ describe('getCallConversionPayload', () => {
});
expect(result).toEqual({
endpointDetails: {
- endpoint: 'https://googleads.googleapis.com/v19/customers/9625812972:uploadCallConversions',
+ endpoint: 'https://googleads.googleapis.com/v22/customers/9625812972:uploadCallConversions',
path: CALL_CONVERSION_ENDPOINT_PATH,
},
payload: {
@@ -388,7 +390,7 @@ describe('getCallConversionPayload', () => {
const result = getCallConversionPayload(message, '9625812972', {});
expect(result).toEqual({
endpointDetails: {
- endpoint: 'https://googleads.googleapis.com/v19/customers/9625812972:uploadCallConversions',
+ endpoint: 'https://googleads.googleapis.com/v22/customers/9625812972:uploadCallConversions',
path: CALL_CONVERSION_ENDPOINT_PATH,
},
payload: {
diff --git a/src/v0/destinations/google_adwords_remarketing_lists/config.js b/src/v0/destinations/google_adwords_remarketing_lists/config.ts
similarity index 63%
rename from src/v0/destinations/google_adwords_remarketing_lists/config.js
rename to src/v0/destinations/google_adwords_remarketing_lists/config.ts
index b450caacaaf..30733e702a0 100644
--- a/src/v0/destinations/google_adwords_remarketing_lists/config.js
+++ b/src/v0/destinations/google_adwords_remarketing_lists/config.ts
@@ -1,6 +1,6 @@
-const { getMappingConfig } = require('../../util');
+import { getMappingConfig } from '../../util';
-const API_VERSION = 'v19';
+const API_VERSION = 'v22';
const OFFLINE_USER_DATA_JOBS_ENDPOINT = 'offlineUserDataJobs';
const BASE_ENDPOINT = `https://googleads.googleapis.com/${API_VERSION}/customers`;
@@ -9,32 +9,36 @@ const CONFIG_CATEGORIES = {
ADDRESSINFO: { type: 'addressInfo', name: 'addressInfo' },
};
const ADDRESS_INFO_ATTRIBUTES = ['firstName', 'lastName', 'country', 'postalCode'];
-const attributeMapping = {
+const attributeMapping: Record<string, string> = {
email: 'hashedEmail',
phone: 'hashedPhoneNumber',
};
const hashAttributes = ['email', 'phone', 'firstName', 'lastName'];
const MAPPING_CONFIG = getMappingConfig(CONFIG_CATEGORIES, __dirname);
-const TYPEOFLIST = Object.freeze({
+const TYPEOFLIST: Readonly<Record<string, string>> = Object.freeze({
userID: 'thirdPartyUserId',
mobileDeviceID: 'mobileId',
});
-const consentConfigMap = {
+const consentConfigMap: Record<string, string> = {
personalizationConsent: 'adPersonalization',
userDataConsent: 'adUserData',
};
-module.exports = {
+const offlineDataJobsMapping = MAPPING_CONFIG[CONFIG_CATEGORIES.AUDIENCE_LIST.name];
+const addressInfoMapping = MAPPING_CONFIG[CONFIG_CATEGORIES.ADDRESSINFO.name];
+const destType = 'google_adwords_remarketing_lists';
+
+export {
API_VERSION,
OFFLINE_USER_DATA_JOBS_ENDPOINT,
BASE_ENDPOINT,
TYPEOFLIST,
attributeMapping,
hashAttributes,
- offlineDataJobsMapping: MAPPING_CONFIG[CONFIG_CATEGORIES.AUDIENCE_LIST.name],
- addressInfoMapping: MAPPING_CONFIG[CONFIG_CATEGORIES.ADDRESSINFO.name],
+ offlineDataJobsMapping,
+ addressInfoMapping,
ADDRESS_INFO_ATTRIBUTES,
consentConfigMap,
- destType: 'google_adwords_remarketing_lists',
+ destType,
};
diff --git a/src/v0/destinations/google_adwords_remarketing_lists/networkHandler.js b/src/v0/destinations/google_adwords_remarketing_lists/networkHandler.ts
similarity index 70%
rename from src/v0/destinations/google_adwords_remarketing_lists/networkHandler.js
rename to src/v0/destinations/google_adwords_remarketing_lists/networkHandler.ts
index 727b1ededed..6a509f343d8 100644
--- a/src/v0/destinations/google_adwords_remarketing_lists/networkHandler.js
+++ b/src/v0/destinations/google_adwords_remarketing_lists/networkHandler.ts
@@ -1,14 +1,11 @@
-const { NetworkError } = require('@rudderstack/integrations-lib');
-const get = require('get-value');
-const { prepareProxyRequest, handleHttpRequest } = require('../../../adapters/network');
-const { isHttpStatusSuccess } = require('../../util/index');
-const {
- processAxiosResponse,
- getDynamicErrorType,
-} = require('../../../adapters/utils/networkUtils');
-const tags = require('../../util/tags');
-const { getAuthErrCategory } = require('../../util/googleUtils');
-const { getDeveloperToken } = require('../../util/googleUtils');
+import { NetworkError } from '@rudderstack/integrations-lib';
+import get from 'get-value';
+import { prepareProxyRequest, handleHttpRequest } from '../../../adapters/network';
+import { isHttpStatusSuccess } from '../../util/index';
+import { processAxiosResponse, getDynamicErrorType } from '../../../adapters/utils/networkUtils';
+import tags from '../../util/tags';
+import { getAuthErrCategory, getDeveloperToken } from '../../util/googleUtils';
+import type { OfflineDataJobPayload } from './types';
/**
* This function helps to create a offlineUserDataJobs
* @param endpoint
@@ -20,9 +17,21 @@ const { getDeveloperToken } = require('../../util/googleUtils');
* ref: https://developers.google.com/google-ads/api/rest/reference/rest/v15/CustomerMatchUserListMetadata
*/
-const createJob = async ({ endpoint, headers, method, params, metadata }) => {
+const createJob = async ({
+ endpoint,
+ headers,
+ method,
+ params,
+ metadata,
+}: {
+ endpoint: string;
+ headers: Record<string, string>;
+ method: string;
+ params: { customerId: string; listId: string; consent: Record<string, string> };
+ metadata: unknown;
+}) => {
const jobCreatingUrl = `${endpoint}:create`;
- const customerMatchUserListMetadata = {
+ const customerMatchUserListMetadata: Record<string, unknown> = {
userList: `customers/${params.customerId}/userLists/${params.listId}`,
};
if (Object.keys(params.consent).length > 0) {
@@ -58,7 +67,21 @@ const createJob = async ({ endpoint, headers, method, params, metadata }) => {
* @param body
*/
-const addUserToJob = async ({ endpoint, headers, method, jobId, body, metadata }) => {
+const addUserToJob = async ({
+ endpoint,
+ headers,
+ method,
+ jobId,
+ body,
+ metadata,
+}: {
+ endpoint: string;
+ headers: Record<string, string>;
+ method: string;
+ jobId: string;
+ body: { JSON: OfflineDataJobPayload };
+ metadata: unknown;
+}) => {
const jobAddingUrl = `${endpoint}/${jobId}:addOperations`;
const secondRequest = {
url: jobAddingUrl,
@@ -84,7 +107,19 @@ const addUserToJob = async ({ endpoint, headers, method, jobId, body, metadata }
* @param method
* @param jobId
*/
-const runTheJob = async ({ endpoint, headers, method, jobId, metadata }) => {
+const runTheJob = async ({
+ endpoint,
+ headers,
+ method,
+ jobId,
+ metadata,
+}: {
+ endpoint: string;
+ headers: Record<string, string>;
+ method: string;
+ jobId: string;
+ metadata: unknown;
+}) => {
const jobRunningUrl = `${endpoint}/${jobId}:run`;
const thirdRequest = {
url: jobRunningUrl,
@@ -108,7 +143,14 @@ const runTheJob = async ({ endpoint, headers, method, jobId, metadata }) => {
* @param {*} request
* @returns
*/
-const gaAudienceProxyRequest = async (request) => {
+const gaAudienceProxyRequest = async (request: {
+ body: { JSON: OfflineDataJobPayload };
+ method: string;
+ params: { customerId: string; listId: string; consent: Record<string, string> };
+ endpoint: string;
+ metadata: unknown;
+ headers: Record<string, string>;
+}) => {
const { body, method, params, endpoint, metadata } = request;
const { headers } = request;
@@ -129,11 +171,18 @@ const gaAudienceProxyRequest = async (request) => {
}
// step2: putting users into the job
- let jobId;
+ let jobId: string | undefined;
if (firstResponse?.response?.data?.resourceName)
// eslint-disable-next-line prefer-destructuring
jobId = firstResponse.response.data.resourceName.split('/')[3];
- const secondResponse = await addUserToJob({ endpoint, headers, method, jobId, body, metadata });
+ const secondResponse = await addUserToJob({
+ endpoint,
+ headers,
+ method,
+ jobId: jobId!,
+ body,
+ metadata,
+ });
if (!secondResponse.success && !isHttpStatusSuccess(secondResponse?.response?.status)) {
return secondResponse;
}
@@ -146,11 +195,14 @@ const gaAudienceProxyRequest = async (request) => {
}
// step3: running the job
- const thirdResponse = await runTheJob({ endpoint, headers, method, jobId, metadata });
+ const thirdResponse = await runTheJob({ endpoint, headers, method, jobId: jobId!, metadata });
return thirdResponse;
};
-const gaAudienceRespHandler = (destResponse, stageMsg) => {
+const gaAudienceRespHandler = (
+ destResponse: { status: number; response: unknown },
+ stageMsg: string,
+) => {
let { status } = destResponse;
const { response } = destResponse;
@@ -172,7 +224,12 @@ const gaAudienceRespHandler = (destResponse, stageMsg) => {
);
};
-const responseHandler = (responseParams) => {
+const responseHandler = (responseParams: {
+ destinationResponse: {
+ status: number;
+ response: { partialFailureError?: { code: number } };
+ };
+}) => {
const { destinationResponse } = responseParams;
const message = `Request Processed Successfully`;
const { status, response } = destinationResponse;
@@ -206,10 +263,15 @@ const responseHandler = (responseParams) => {
return undefined;
};
-function networkHandler() {
+function networkHandler(this: {
+ proxy: typeof gaAudienceProxyRequest;
+ processAxiosResponse: typeof processAxiosResponse;
+ prepareProxy: typeof prepareProxyRequest;
+ responseHandler: typeof responseHandler;
+}) {
this.proxy = gaAudienceProxyRequest;
this.processAxiosResponse = processAxiosResponse;
this.prepareProxy = prepareProxyRequest;
this.responseHandler = responseHandler;
}
-module.exports = { networkHandler };
+export { networkHandler };
diff --git a/src/v0/destinations/google_adwords_remarketing_lists/recordTransform.js b/src/v0/destinations/google_adwords_remarketing_lists/recordTransform.ts
similarity index 78%
rename from src/v0/destinations/google_adwords_remarketing_lists/recordTransform.js
rename to src/v0/destinations/google_adwords_remarketing_lists/recordTransform.ts
index 3cb110f8faa..4b2d7951cc7 100644
--- a/src/v0/destinations/google_adwords_remarketing_lists/recordTransform.js
+++ b/src/v0/destinations/google_adwords_remarketing_lists/recordTransform.ts
@@ -1,11 +1,11 @@
-const {
+import {
InstrumentationError,
groupByInBatches,
mapInBatches,
reduceInBatches,
isDefinedAndNotNullAndNotEmpty,
-} = require('@rudderstack/integrations-lib');
-const {
+} from '@rudderstack/integrations-lib';
+import {
getAccessToken,
constructPayload,
returnArrayOfSubarrays,
@@ -14,17 +14,18 @@ const {
isEventSentByVDMV2Flow,
generateErrorObject,
getErrorRespEvents,
-} = require('../../util');
-const { populateConsentFromConfig } = require('../../util/googleUtils');
-const {
- populateIdentifiersForRecordEvent,
- responseBuilder,
- getOperationAudienceId,
-} = require('./util');
-const { getErrorResponse, createFinalResponse } = require('../../util/recordUtils');
-const { offlineDataJobsMapping, consentConfigMap } = require('./config');
-
-const processRecordEventArray = async (records, context, operationType) => {
+} from '../../util';
+import { populateConsentFromConfig } from '../../util/googleUtils';
+import { populateIdentifiersForRecordEvent, responseBuilder, getOperationAudienceId } from './util';
+import { getErrorResponse, createFinalResponse } from '../../util/recordUtils';
+import { offlineDataJobsMapping, consentConfigMap } from './config';
+import type { RecordEventContext, RecordInput } from './types';
+
+const processRecordEventArray = async (
+ records: RecordInput[],
+ context: RecordEventContext,
+ operationType: string,
+) => {
const {
message,
destination,
@@ -47,7 +48,7 @@ const processRecordEventArray = async (records, context, operationType) => {
isHashRequired,
);
- const outputPayload = constructPayload(message, offlineDataJobsMapping);
+ const outputPayload = constructPayload(message, offlineDataJobsMapping)!;
const userIdentifierChunks = returnArrayOfSubarrays(userIdentifiersList, 20);
outputPayload.operations = await mapInBatches(userIdentifierChunks, (chunk) => ({
@@ -66,7 +67,10 @@ const processRecordEventArray = async (records, context, operationType) => {
return getSuccessRespEvents(toSendEvents, metadata, destination, true);
};
-async function preparePayload(events, config) {
+async function preparePayload(
+ events: RecordInput[],
+ config: Omit<RecordEventContext, 'message' | 'destination' | 'accessToken'>,
+) {
/**
* If we are getting invalid identifiers, we are preparing empty object response for that event and that is ending up
* as an error from google ads api. So we are validating the identifiers and then processing the events.
@@ -94,7 +98,7 @@ async function preparePayload(events, config) {
}
return acc;
},
- { validEvents: [], invalidEvents: [] },
+ { validEvents: [] as RecordInput[], invalidEvents: [] as unknown[] },
);
if (validEvents.length === 0) {
@@ -104,20 +108,21 @@ async function preparePayload(events, config) {
const { destination, message, metadata } = validEvents[0];
const accessToken = getAccessToken(metadata, 'access_token');
- const context = {
+ const context: RecordEventContext = {
message,
destination,
accessToken,
...config,
};
- const groupedRecordsByAction = await groupByInBatches(validEvents, (record) =>
- record.message.action?.toLowerCase(),
+ const groupedRecordsByAction = await groupByInBatches(
+ validEvents,
+ (record) => record.message.action?.toLowerCase() || '',
);
const actionResponses = await reduceInBatches(
['delete', 'insert', 'update'],
- async (responses, action) => {
+ async (responses: Record<string, unknown>, action: string) => {
const operationType = action === 'delete' ? 'remove' : 'create';
if (groupedRecordsByAction[action]) {
return {
@@ -151,7 +156,7 @@ async function preparePayload(events, config) {
return finalResponse;
}
-async function processEventStreamRecordV1Events(groupedRecordInputs) {
+async function processEventStreamRecordV1Events(groupedRecordInputs: RecordInput[]) {
const { destination } = groupedRecordInputs[0];
const {
audienceId,
@@ -174,7 +179,7 @@ async function processEventStreamRecordV1Events(groupedRecordInputs) {
return preparePayload(groupedRecordInputs, config);
}
-async function processVDMV1RecordEvents(groupedRecordInputs) {
+async function processVDMV1RecordEvents(groupedRecordInputs: RecordInput[]) {
const { destination, message } = groupedRecordInputs[0];
const {
audienceId,
@@ -197,20 +202,20 @@ async function processVDMV1RecordEvents(groupedRecordInputs) {
return preparePayload(groupedRecordInputs, config);
}
-async function processVDMV2RecordEvents(groupedRecordInputs) {
+async function processVDMV2RecordEvents(groupedRecordInputs: RecordInput[]) {
const { connection, message } = groupedRecordInputs[0];
const { audienceId, typeOfList, isHashRequired, userDataConsent, personalizationConsent } =
connection.config.destination;
const userSchema = message?.identifiers ? Object.keys(message.identifiers) : undefined;
- const events = await mapInBatches(groupedRecordInputs, (record) => ({
+ const events = (await mapInBatches(groupedRecordInputs, (record) => ({
...record,
message: {
...record.message,
fields: record.message.identifiers,
},
- }));
+ }))) as RecordInput[];
const config = {
audienceId,
@@ -224,7 +229,7 @@ async function processVDMV2RecordEvents(groupedRecordInputs) {
return preparePayload(events, config);
}
-async function processRecordInputs(groupedRecordInputs) {
+async function processRecordInputs(groupedRecordInputs: RecordInput[]) {
const event = groupedRecordInputs[0];
if (isEventSentByVDMV1Flow(event)) {
@@ -236,6 +241,4 @@ async function processRecordInputs(groupedRecordInputs) {
return processEventStreamRecordV1Events(groupedRecordInputs);
}
-module.exports = {
- processRecordInputs,
-};
+export { processRecordInputs };
diff --git a/src/v0/destinations/google_adwords_remarketing_lists/transform.js b/src/v0/destinations/google_adwords_remarketing_lists/transform.ts
similarity index 70%
rename from src/v0/destinations/google_adwords_remarketing_lists/transform.js
rename to src/v0/destinations/google_adwords_remarketing_lists/transform.ts
index da72f615040..5cdc7e17dad 100644
--- a/src/v0/destinations/google_adwords_remarketing_lists/transform.js
+++ b/src/v0/destinations/google_adwords_remarketing_lists/transform.ts
@@ -1,22 +1,23 @@
-const {
+import {
InstrumentationError,
ConfigurationError,
groupByInBatches,
-} = require('@rudderstack/integrations-lib');
-const logger = require('../../../logger');
-const {
+} from '@rudderstack/integrations-lib';
+import logger from '../../../logger';
+import {
returnArrayOfSubarrays,
constructPayload,
simpleProcessRouterDest,
getAccessToken,
-} = require('../../util');
+} from '../../util';
-const { populateConsentFromConfig } = require('../../util/googleUtils');
-const { offlineDataJobsMapping, consentConfigMap } = require('./config');
-const { processRecordInputs } = require('./recordTransform');
-const { populateIdentifiers, responseBuilder, getOperationAudienceId } = require('./util');
+import { populateConsentFromConfig } from '../../util/googleUtils';
+import { offlineDataJobsMapping, consentConfigMap } from './config';
+import { processRecordInputs } from './recordTransform';
+import { populateIdentifiers, responseBuilder, getOperationAudienceId } from './util';
+import type { GARLDestination, Message, OfflineDataJobPayload, RecordInput } from './types';
-function extraKeysPresent(dictionary, keyList) {
+function extraKeysPresent(dictionary: Record, keyList: string[]) {
// eslint-disable-next-line no-restricted-syntax
for (const key in dictionary) {
if (!keyList.includes(key)) {
@@ -36,12 +37,12 @@ function extraKeysPresent(dictionary, keyList) {
* @param {rudder event destination} destination
* @returns
*/
-const createPayload = (message, destination) => {
+const createPayload = (message: Message, destination: GARLDestination) => {
const { listData } = message.properties;
const properties = ['add', 'remove'];
const { typeOfList, userSchema, isHashRequired } = destination.Config;
- let outputPayloads = {};
+ let outputPayloads: Partial> = {};
const typeOfOperation = Object.keys(listData);
typeOfOperation.forEach((key) => {
if (properties.includes(key)) {
@@ -58,19 +59,24 @@ const createPayload = (message, destination) => {
return;
}
- const outputPayload = constructPayload(message, offlineDataJobsMapping);
+ const outputPayload = constructPayload(
+ message,
+ offlineDataJobsMapping,
+ ) as OfflineDataJobPayload;
outputPayload.operations = [];
// breaking the userIdentiFier array in chunks of 20
- const userIdentifierChunks = returnArrayOfSubarrays(userIdentifiersList, 20);
+ const userIdentifierChunks: Record[][] = returnArrayOfSubarrays(
+ userIdentifiersList,
+ 20,
+ );
// putting each chunk in different create/remove operations
switch (key) {
case 'add':
// for add operation
userIdentifierChunks.forEach((element) => {
const operations = {
- create: {},
+ create: { userIdentifiers: element },
};
- operations.create.userIdentifiers = element;
outputPayload.operations.push(operations);
});
outputPayloads = { ...outputPayloads, create: outputPayload };
@@ -79,9 +85,8 @@ const createPayload = (message, destination) => {
// for remove operation
userIdentifierChunks.forEach((element) => {
const operations = {
- remove: {},
+ remove: { userIdentifiers: element },
};
- operations.remove.userIdentifiers = element;
outputPayload.operations.push(operations);
});
outputPayloads = { ...outputPayloads, remove: outputPayload };
@@ -96,8 +101,12 @@ const createPayload = (message, destination) => {
return outputPayloads;
};
-const processEvent = async (metadata, message, destination) => {
- const response = [];
+const processEvent = async (
+ metadata: Record,
+ message: Message,
+ destination: GARLDestination,
+) => {
+ const response: unknown[] = [];
if (!message.type) {
throw new InstrumentationError('Message Type is not present. Aborting message.');
}
@@ -137,15 +146,19 @@ const processEvent = async (metadata, message, destination) => {
throw new InstrumentationError(`Message Type ${message.type} not supported.`);
};
-const process = async (event) => processEvent(event.metadata, event.message, event.destination);
+const process = async (event: {
+ metadata: Record;
+ message: Message;
+ destination: GARLDestination;
+}) => processEvent(event.metadata, event.message, event.destination);
-const processRouterDest = async (inputs, reqMetadata) => {
- const respList = [];
+const processRouterDest = async (inputs: { message: Message }[], reqMetadata: unknown) => {
+ const respList: unknown[] = [];
const groupedInputs = await groupByInBatches(inputs, (input) =>
input.message.type?.toLowerCase(),
);
- let transformedRecordEvent = [];
- let transformedAudienceEvent = [];
+ let transformedRecordEvent: unknown[] = [];
+ let transformedAudienceEvent: unknown[] = [];
const eventTypes = ['record', 'audiencelist'];
if (extraKeysPresent(groupedInputs, eventTypes)) {
@@ -153,7 +166,9 @@ const processRouterDest = async (inputs, reqMetadata) => {
}
if (groupedInputs.record) {
- transformedRecordEvent = await processRecordInputs(groupedInputs.record, reqMetadata);
+ transformedRecordEvent = await processRecordInputs(
+ groupedInputs.record as unknown as RecordInput[],
+ );
}
if (groupedInputs.audiencelist) {
@@ -161,6 +176,7 @@ const processRouterDest = async (inputs, reqMetadata) => {
groupedInputs.audiencelist,
process,
reqMetadata,
+ undefined,
);
}
@@ -168,4 +184,4 @@ const processRouterDest = async (inputs, reqMetadata) => {
return respList;
};
-module.exports = { process, processRouterDest };
+export { process, processRouterDest };
diff --git a/src/v0/destinations/google_adwords_remarketing_lists/types.ts b/src/v0/destinations/google_adwords_remarketing_lists/types.ts
new file mode 100644
index 00000000000..a2b7d72d29f
--- /dev/null
+++ b/src/v0/destinations/google_adwords_remarketing_lists/types.ts
@@ -0,0 +1,58 @@
+import type { Destination } from '../../../types';
+
+export interface GARLDestinationConfig {
+ customerId: string;
+ audienceId: string;
+ loginCustomerId?: string;
+ subAccount?: boolean;
+ typeOfList: string;
+ userSchema: string[];
+ isHashRequired: boolean;
+ userDataConsent?: string;
+ personalizationConsent?: string;
+}
+
+export interface RecordEventContext {
+ message: unknown;
+ destination: { Config: GARLDestinationConfig };
+ accessToken: string;
+ audienceId: string;
+ typeOfList: string;
+ userSchema?: string[];
+ isHashRequired: boolean;
+ userDataConsent?: unknown;
+ personalizationConsent?: unknown;
+}
+
+export interface Message {
+ properties: { listData: Record[]> };
+ type: string;
+ [key: string]: unknown;
+}
+
+export interface RecordInput {
+ message: {
+ type?: string;
+ action?: string;
+ fields: Record;
+ identifiers?: Record;
+ };
+ metadata: Record;
+ destination: {
+ Config: GARLDestinationConfig;
+ };
+ connection: {
+ config: {
+ destination: GARLDestinationConfig;
+ };
+ };
+}
+
+export type GARLDestination = Destination;
+
+export interface OfflineDataJobPayload {
+ operations: Array<{
+ create?: { userIdentifiers: Record[] };
+ remove?: { userIdentifiers: Record[] };
+ }>;
+}
diff --git a/src/v0/destinations/google_adwords_remarketing_lists/util.test.js b/src/v0/destinations/google_adwords_remarketing_lists/util.test.ts
similarity index 90%
rename from src/v0/destinations/google_adwords_remarketing_lists/util.test.js
rename to src/v0/destinations/google_adwords_remarketing_lists/util.test.ts
index 4a35cc23ecc..df924f6b3c3 100644
--- a/src/v0/destinations/google_adwords_remarketing_lists/util.test.js
+++ b/src/v0/destinations/google_adwords_remarketing_lists/util.test.ts
@@ -1,6 +1,6 @@
-const { populateIdentifiers, responseBuilder, getOperationAudienceId } = require('./util');
-const { API_VERSION } = require('./config');
-const { generateRandomString } = require('@rudderstack/integrations-lib');
+import { generateRandomString } from '@rudderstack/integrations-lib';
+import { populateIdentifiers, responseBuilder, getOperationAudienceId } from './util';
+import { API_VERSION } from './config';
const accessToken = generateRandomString();
const body = {
@@ -168,9 +168,11 @@ describe('GARL utils test', () => {
getOperationAudienceId(baseDestination.Config.audienceId, message),
consentBlock,
);
- expect(response).toEqual();
- } catch (error) {
- expect(error.message).toEqual(`loginCustomerId is required as subAccount is true.`);
+ expect(response).toEqual(undefined);
+ } catch (error: unknown) {
+ expect((error as Error).message).toEqual(
+ `loginCustomerId is required as subAccount is true.`,
+ );
}
});
@@ -185,9 +187,9 @@ describe('GARL utils test', () => {
getOperationAudienceId(baseDestination.Config.audienceId, message),
consentBlock,
);
- expect(response).toEqual();
- } catch (error) {
- expect(error.message).toEqual(`List ID is a mandatory field`);
+ expect(response).toEqual(undefined);
+ } catch (error: unknown) {
+ expect((error as Error).message).toEqual(`List ID is a mandatory field`);
}
});
});
diff --git a/src/v0/destinations/google_adwords_remarketing_lists/util.js b/src/v0/destinations/google_adwords_remarketing_lists/util.ts
similarity index 76%
rename from src/v0/destinations/google_adwords_remarketing_lists/util.js
rename to src/v0/destinations/google_adwords_remarketing_lists/util.ts
index 9d8b48aa4b7..2a17700577b 100644
--- a/src/v0/destinations/google_adwords_remarketing_lists/util.js
+++ b/src/v0/destinations/google_adwords_remarketing_lists/util.ts
@@ -1,18 +1,18 @@
-const get = require('get-value');
-const sha256 = require('sha256');
-const { ConfigurationError } = require('@rudderstack/integrations-lib');
-const {
+import get from 'get-value';
+import sha256 from 'sha256';
+import { ConfigurationError } from '@rudderstack/integrations-lib';
+import {
isDefinedAndNotNullAndNotEmpty,
constructPayload,
defaultRequestConfig,
removeHyphens,
removeUndefinedAndNullValues,
getDestinationExternalIDInfoForRetl,
-} = require('../../util');
-const logger = require('../../../logger');
-const { MappedToDestinationKey } = require('../../../constants');
-const { JSON_MIME_TYPE } = require('../../util/constant');
-const {
+} from '../../util';
+import logger from '../../../logger';
+import { MappedToDestinationKey } from '../../../constants';
+import { JSON_MIME_TYPE } from '../../util/constant';
+import {
addressInfoMapping,
attributeMapping,
TYPEOFLIST,
@@ -20,9 +20,10 @@ const {
BASE_ENDPOINT,
hashAttributes,
ADDRESS_INFO_ATTRIBUTES,
-} = require('./config');
+} from './config';
+import type { GARLDestinationConfig } from './types';
-const hashEncrypt = (object) => {
+const hashEncrypt = (object: Record) => {
Object.keys(object).forEach((key) => {
if (hashAttributes.includes(key) && object[key]) {
// eslint-disable-next-line no-param-reassign
@@ -31,7 +32,13 @@ const hashEncrypt = (object) => {
});
};
-const responseBuilder = (accessToken, body, { Config }, audienceId, consentBlock) => {
+const responseBuilder = (
+ accessToken: string,
+ body: unknown,
+ { Config }: { Config: GARLDestinationConfig },
+ audienceId: string | null,
+ consentBlock: Record,
+) => {
const payload = body;
const response = defaultRequestConfig();
const filteredCustomerId = removeHyphens(Config.customerId);
@@ -68,9 +75,14 @@ const responseBuilder = (accessToken, body, { Config }, audienceId, consentBlock
* @param {boolean} isHashRequired
* @returns
*/
-const populateIdentifiers = (attributeArray, typeOfList, userSchema, isHashRequired) => {
- const userIdentifier = [];
- let attribute;
+const populateIdentifiers = (
+ attributeArray: Record[],
+ typeOfList: string,
+ userSchema: string[],
+ isHashRequired: boolean,
+) => {
+ const userIdentifier: Record[] = [];
+ let attribute: string | string[];
if (TYPEOFLIST[typeOfList]) {
attribute = TYPEOFLIST[typeOfList];
} else {
@@ -110,12 +122,12 @@ const populateIdentifiers = (attributeArray, typeOfList, userSchema, isHashRequi
};
const populateIdentifiersForRecordEvent = (
- identifiersArray,
- typeOfList,
- userSchema,
- isHashRequired,
+ identifiersArray: Record[],
+ typeOfList: string,
+ userSchema: string[] | undefined,
+ isHashRequired: boolean,
) => {
- const userIdentifiers = [];
+ const userIdentifiers: Record[] = [];
if (isDefinedAndNotNullAndNotEmpty(identifiersArray)) {
// traversing through every element in the add array
@@ -127,14 +139,14 @@ const populateIdentifiersForRecordEvent = (
userIdentifiers.push({ [TYPEOFLIST[typeOfList]]: identifiers[TYPEOFLIST[typeOfList]] });
} else {
Object.entries(attributeMapping).forEach(([key, mappedKey]) => {
- if (identifiers[key] && userSchema.includes(key))
+ if (identifiers[key] && userSchema?.includes(key))
userIdentifiers.push({ [mappedKey]: identifiers[key] });
});
const addressInfo = constructPayload(identifiers, addressInfoMapping);
if (
isDefinedAndNotNullAndNotEmpty(addressInfo) &&
- (userSchema.includes('addressInfo') ||
- userSchema.some((schema) => ADDRESS_INFO_ATTRIBUTES.includes(schema)))
+ (userSchema?.includes('addressInfo') ||
+ userSchema?.some((schema) => ADDRESS_INFO_ATTRIBUTES.includes(schema)))
)
userIdentifiers.push({ addressInfo });
}
@@ -143,7 +155,7 @@ const populateIdentifiersForRecordEvent = (
return userIdentifiers;
};
-const getOperationAudienceId = (audienceId, message) => {
+const getOperationAudienceId = (audienceId: string, message: Record) => {
let operationAudienceId = audienceId;
const mappedToDestination = get(message, MappedToDestinationKey);
if (!operationAudienceId && mappedToDestination) {
@@ -151,12 +163,12 @@ const getOperationAudienceId = (audienceId, message) => {
message,
'GOOGLE_ADWORDS_REMARKETING_LISTS',
);
- operationAudienceId = objectType;
+ operationAudienceId = objectType!;
}
return operationAudienceId;
};
-module.exports = {
+export {
populateIdentifiers,
responseBuilder,
getOperationAudienceId,
diff --git a/src/v0/destinations/heap/config.js b/src/v0/destinations/heap/config.js
deleted file mode 100644
index 8a1ea65ec17..00000000000
--- a/src/v0/destinations/heap/config.js
+++ /dev/null
@@ -1,20 +0,0 @@
-const { getMappingConfig } = require('../../util');
-
-const baseEndpoint = 'https://heapanalytics.com/api';
-
-const endpoints = {
- trackUrl: `${baseEndpoint}/track`, // track properties, | Track
- identifyUrl: `${baseEndpoint}/add_user_properties`, // identify a user| Identify
-};
-
-const CONFIG_CATEGORIES = {
- IDENTIFY: { endPoint: endpoints.identifyUrl, name: 'HEAPIdentifyConfig' },
- TRACK: { endPoint: endpoints.trackUrl, name: 'HEAPTrackConfig' },
-};
-
-const MAPPING_CONFIG = getMappingConfig(CONFIG_CATEGORIES, __dirname);
-
-module.exports = {
- CONFIG_CATEGORIES,
- MAPPING_CONFIG,
-};
diff --git a/src/v0/destinations/heap/data/HEAPIdentifyConfig.json b/src/v0/destinations/heap/data/HEAPIdentifyConfig.json
deleted file mode 100644
index 20c690a5b74..00000000000
--- a/src/v0/destinations/heap/data/HEAPIdentifyConfig.json
+++ /dev/null
@@ -1,14 +0,0 @@
-[
- {
- "destKey": "identity",
- "sourceKeys": "userId",
- "sourceFromGenericMap": true,
- "required": true
- },
- {
- "destKey": "properties",
- "sourceKeys": "traits",
- "sourceFromGenericMap": true,
- "required": true
- }
-]
diff --git a/src/v0/destinations/heap/data/HEAPTrackConfig.json b/src/v0/destinations/heap/data/HEAPTrackConfig.json
deleted file mode 100644
index c326795a529..00000000000
--- a/src/v0/destinations/heap/data/HEAPTrackConfig.json
+++ /dev/null
@@ -1,29 +0,0 @@
-[
- {
- "destKey": "identity",
- "sourceKeys": "userId",
- "sourceFromGenericMap": true,
- "required": true
- },
- {
- "destKey": "event",
- "sourceKeys": "event",
- "required": true
- },
- {
- "destKey": "properties",
- "sourceKeys": "properties",
- "required": false
- },
- {
- "destKey": "timestamp",
- "sourceKeys": "timestamp",
- "sourceFromGenericMap": true,
- "required": false
- },
- {
- "destKey": "idempotency_key",
- "sourceKeys": "properties.idempotencyKey",
- "required": false
- }
-]
diff --git a/src/v0/destinations/heap/transform.js b/src/v0/destinations/heap/transform.js
deleted file mode 100644
index 42563241657..00000000000
--- a/src/v0/destinations/heap/transform.js
+++ /dev/null
@@ -1,71 +0,0 @@
-const { InstrumentationError, TransformationError } = require('@rudderstack/integrations-lib');
-const { CONFIG_CATEGORIES, MAPPING_CONFIG } = require('./config');
-const { EventType } = require('../../../constants');
-const {
- constructPayload,
- defaultPostRequestConfig,
- removeUndefinedAndNullValues,
- defaultRequestConfig,
- flattenJson,
- simpleProcessRouterDest,
-} = require('../../util');
-const { JSON_MIME_TYPE } = require('../../util/constant');
-
-function responseBuilderSimple(message, category, destination) {
- const payload = constructPayload(message, MAPPING_CONFIG[category.name]);
- if (payload) {
- if (payload.properties) {
- payload.properties = flattenJson(payload.properties);
- // remove duplicate key as it is being passed at root.
- if (payload.properties.idempotencyKey) {
- delete payload.properties.idempotencyKey;
- }
- }
- const responseBody = {
- ...payload,
- app_id: destination.Config.appId,
- };
- const response = defaultRequestConfig();
- response.endpoint = category.endPoint;
- response.method = defaultPostRequestConfig.requestMethod;
- response.headers = {
- 'Content-Type': JSON_MIME_TYPE,
- Accept: JSON_MIME_TYPE,
- };
- response.userId = message.anonymousId;
- response.body.JSON = removeUndefinedAndNullValues(responseBody);
- return response;
- }
- // fail-safety for developer error
- throw new TransformationError('Payload could not be constructed');
-}
-
-const processEvent = (message, destination) => {
- if (!message.type) {
- throw new InstrumentationError('invalid message type for heap');
- }
-
- const messageType = message.type;
- let category;
- switch (messageType.toLowerCase()) {
- case EventType.IDENTIFY:
- category = CONFIG_CATEGORIES.IDENTIFY;
- break;
- case EventType.TRACK:
- category = CONFIG_CATEGORIES.TRACK;
- break;
- default:
- throw new InstrumentationError(`message type ${messageType} not supported for heap`);
- }
-
- // build the response
- return responseBuilderSimple(message, category, destination);
-};
-
-const process = async (event) => processEvent(event.message, event.destination);
-const processRouterDest = async (inputs, reqMetadata) => {
- const respList = await simpleProcessRouterDest(inputs, process, reqMetadata);
- return respList;
-};
-
-module.exports = { process, processRouterDest };
diff --git a/src/v0/destinations/hs/HSTransform-v1.js b/src/v0/destinations/hs/HSTransform-v1.ts
similarity index 77%
rename from src/v0/destinations/hs/HSTransform-v1.js
rename to src/v0/destinations/hs/HSTransform-v1.ts
index 0cc7fa98379..266db056be1 100644
--- a/src/v0/destinations/hs/HSTransform-v1.js
+++ b/src/v0/destinations/hs/HSTransform-v1.ts
@@ -1,12 +1,12 @@
-const get = require('get-value');
-const lodash = require('lodash');
-const {
+import get from 'get-value';
+import lodash from 'lodash';
+import {
InstrumentationError,
ConfigurationError,
TransformationError,
-} = require('@rudderstack/integrations-lib');
-const { MappedToDestinationKey, GENERIC_TRUE_VALUES } = require('../../../constants');
-const {
+} from '@rudderstack/integrations-lib';
+import { MappedToDestinationKey, GENERIC_TRUE_VALUES } from '../../../constants';
+import {
defaultGetRequestConfig,
defaultPostRequestConfig,
defaultRequestConfig,
@@ -19,8 +19,8 @@ const {
getDestinationExternalID,
getDestinationExternalIDInfoForRetl,
sortBatchesByMinJobId,
-} = require('../../util');
-const {
+} from '../../util';
+import {
BATCH_CONTACT_ENDPOINT,
MAX_BATCH_SIZE,
TRACK_ENDPOINT,
@@ -29,16 +29,26 @@ const {
CRM_CREATE_UPDATE_ALL_OBJECTS,
MAX_BATCH_SIZE_CRM_OBJECT,
MAX_BATCH_SIZE_CRM_CONTACT,
-} = require('./config');
-const {
+} from './config';
+import {
getTransformedJSON,
getEmailAndUpdatedProps,
formatPropertyValueForIdentify,
getHsSearchId,
populateTraits,
removeHubSpotSystemField,
-} = require('./util');
-const { JSON_MIME_TYPE } = require('../../util/constant');
+} from './util';
+import { JSON_MIME_TYPE } from '../../util/constant';
+import type { Metadata } from '../../../types';
+import type {
+ HubSpotPropertyMap,
+ HubSpotLegacyTrackParams,
+ HubSpotRouterTransformationOutput,
+ HubspotRouterRequest,
+ HubspotProcessorTransformationOutput,
+ HubSpotBatchProcessingItem,
+ HubSpotBatchRequestOutput,
+} from './types';
/**
* using legacy API
@@ -53,14 +63,17 @@ const { JSON_MIME_TYPE } = require('../../util/constant');
* @param {*} propertyMap
* @returns
*/
-const processLegacyIdentify = async ({ message, destination, metadata }, propertyMap) => {
+const processLegacyIdentify = async (
+ { message, destination, metadata }: HubspotRouterRequest,
+ propertyMap?: HubSpotPropertyMap,
+): Promise => {
const { Config } = destination;
let traits = getFieldValueFromMessage(message, 'traits');
const mappedToDestination = get(message, MappedToDestinationKey);
const operation = get(message, 'context.hubspotOperation');
// if mappedToDestination is set true, then add externalId to traits
// rETL source
- let endpoint;
+ let endpoint: string = '';
const response = defaultRequestConfig();
response.method = defaultPostRequestConfig.requestMethod;
if (
@@ -69,7 +82,8 @@ const processLegacyIdentify = async ({ message, destination, metadata }, propert
operation
) {
addExternalIdToTraits(message);
- const { objectType } = getDestinationExternalIDInfoForRetl(message, 'HS');
+ const externalIdInfo = getDestinationExternalIDInfoForRetl(message, 'HS');
+ const objectType = externalIdInfo?.objectType;
if (!objectType) {
throw new InstrumentationError('objectType not found');
}
@@ -138,14 +152,17 @@ const processLegacyIdentify = async ({ message, destination, metadata }, propert
* @param {*} propertyMap
* @returns
*/
-const processLegacyTrack = async ({ message, destination, metadata }, propertyMap) => {
+const processLegacyTrack = async (
+ { message, destination, metadata }: HubspotRouterRequest,
+ propertyMap?: HubSpotPropertyMap,
+): Promise => {
const { Config } = destination;
if (!Config.hubID) {
throw new ConfigurationError('Invalid hub id value provided in the destination configuration');
}
- const parameters = {
+ const parameters: HubSpotLegacyTrackParams = {
_a: Config.hubID,
_n: message.event,
_m:
@@ -179,12 +196,17 @@ const processLegacyTrack = async ({ message, destination, metadata }, propertyMa
return response;
};
+
// Seggregating update and create calls for retl sources
-const batchIdentifyForrETL = (arrayChunksIdentify, batchedResponseList, batchOperation) => {
+const batchIdentifyForrETL = (
+ arrayChunksIdentify: HubSpotBatchProcessingItem[][],
+ batchedResponseList: HubSpotRouterTransformationOutput[],
+ batchOperation: string,
+): HubSpotRouterTransformationOutput[] => {
// list of chunks [ [..], [..] ]
arrayChunksIdentify.forEach((chunk) => {
- const identifyResponseList = [];
- const metadata = [];
+ const identifyResponseList: Record[] = [];
+ const metadata: Metadata[] = [];
// extracting message, destination value
// from the first event in a batch
@@ -196,7 +218,9 @@ const batchIdentifyForrETL = (arrayChunksIdentify, batchedResponseList, batchOpe
// create operation
chunk.forEach((ev) => {
// if source is of rETL
- identifyResponseList.push({ ...ev.message.body.JSON });
+ identifyResponseList.push({
+ ...ev.message.body.JSON,
+ });
batchEventResponse.batchedRequest.endpoint = `${ev.message.endpoint}/batch/create`;
metadata.push(ev.metadata);
@@ -224,8 +248,8 @@ const batchIdentifyForrETL = (arrayChunksIdentify, batchedResponseList, batchOpe
inputs: identifyResponseList,
};
- batchEventResponse.batchedRequest.headers = message.headers;
- batchEventResponse.batchedRequest.params = message.params;
+ batchEventResponse.batchedRequest.headers = message.headers!;
+ batchEventResponse.batchedRequest.params = message.params!;
batchEventResponse = {
...batchEventResponse,
@@ -244,24 +268,26 @@ const batchIdentifyForrETL = (arrayChunksIdentify, batchedResponseList, batchOpe
return batchedResponseList;
};
-const legacyBatchEvents = (destEvents) => {
- let batchedResponseList = [];
- const trackResponseList = [];
- const eventsChunk = [];
- const createAllObjectsEventChunk = [];
- const updateAllObjectsEventChunk = [];
- let maxBatchSize;
+const legacyBatchEvents = (
+ destEvents: HubSpotBatchProcessingItem[],
+): HubSpotRouterTransformationOutput[] => {
+ let batchedResponseList: HubSpotRouterTransformationOutput[] = [];
+ const trackResponseList: HubSpotRouterTransformationOutput[] = [];
+ const eventsChunk: HubSpotBatchProcessingItem[] = [];
+ const createAllObjectsEventChunk: HubSpotBatchProcessingItem[] = [];
+ const updateAllObjectsEventChunk: HubSpotBatchProcessingItem[] = [];
+ let maxBatchSize: number | undefined;
destEvents.forEach((event) => {
// handler for track call
if (event.message.messageType === 'track') {
const { message, metadata, destination } = event;
const endpoint = get(message, 'endpoint');
- const batchedResponse = defaultBatchRequestConfig();
- batchedResponse.batchedRequest.headers = message.headers;
+ const batchedResponse: HubSpotBatchRequestOutput = defaultBatchRequestConfig();
+ batchedResponse.batchedRequest.headers = message.headers!;
batchedResponse.batchedRequest.endpoint = endpoint;
batchedResponse.batchedRequest.body = message.body;
- batchedResponse.batchedRequest.params = message.params;
+ batchedResponse.batchedRequest.params = message.params!;
batchedResponse.batchedRequest.method = defaultGetRequestConfig.requestMethod;
batchedResponse.metadata = [metadata];
batchedResponse.destination = destination;
@@ -319,8 +345,8 @@ const legacyBatchEvents = (destEvents) => {
// list of chunks [ [..], [..] ]
arrayChunksIdentify.forEach((chunk) => {
- const identifyResponseList = [];
- const metadata = [];
+ const identifyResponseList: Record[] = [];
+ const metadata: Metadata[] = [];
// extracting destination, apiKey value
// from the first event in a batch
@@ -332,21 +358,34 @@ const legacyBatchEvents = (destEvents) => {
chunk.forEach((ev) => {
// if source is of rETL
if (ev.message.source === 'rETL') {
- identifyResponseList.push({ ...ev.message.body.JSON });
+ identifyResponseList.push({
+ ...ev.message.body.JSON,
+ });
batchEventResponse.batchedRequest.body.JSON = {
inputs: identifyResponseList,
};
batchEventResponse.batchedRequest.endpoint = `${ev.message.endpoint}/batch/create`;
metadata.push(ev.metadata);
} else {
- const { email, updatedProperties } = getEmailAndUpdatedProps(
- ev.message.body.JSON.properties,
- );
+ const bodyJSON = ev.message.body.JSON;
+
+ if (
+ !bodyJSON ||
+ Array.isArray(bodyJSON) ||
+ !('properties' in bodyJSON) ||
+ !Array.isArray(bodyJSON.properties)
+ ) {
+ throw new TransformationError(
+ 'Legacy identify batch: invalid payload (expected object with properties array)',
+ );
+ }
+
+ const { email, updatedProperties } = getEmailAndUpdatedProps(bodyJSON.properties);
// eslint-disable-next-line no-param-reassign
- ev.message.body.JSON.properties = updatedProperties;
+ bodyJSON.properties = updatedProperties;
identifyResponseList.push({
email,
- properties: ev.message.body.JSON.properties,
+ properties: bodyJSON.properties,
});
metadata.push(ev.metadata);
batchEventResponse.batchedRequest.body.JSON_ARRAY = {
@@ -390,8 +429,4 @@ const legacyBatchEvents = (destEvents) => {
return sortBatchesByMinJobId(batchedResponseList.concat(trackResponseList));
};
-module.exports = {
- processLegacyIdentify,
- processLegacyTrack,
- legacyBatchEvents,
-};
+export { processLegacyIdentify, processLegacyTrack, legacyBatchEvents };
diff --git a/src/v0/destinations/hs/HSTransform-v2.js b/src/v0/destinations/hs/HSTransform-v2.ts
similarity index 59%
rename from src/v0/destinations/hs/HSTransform-v2.js
rename to src/v0/destinations/hs/HSTransform-v2.ts
index c0fa525d9ea..1ca374ac0d8 100644
--- a/src/v0/destinations/hs/HSTransform-v2.js
+++ b/src/v0/destinations/hs/HSTransform-v2.ts
@@ -1,13 +1,13 @@
-const get = require('get-value');
-const lodash = require('lodash');
-const {
+import get from 'get-value';
+import lodash from 'lodash';
+import {
TransformationError,
ConfigurationError,
InstrumentationError,
-} = require('@rudderstack/integrations-lib');
-const validator = require('validator');
-const { MappedToDestinationKey, GENERIC_TRUE_VALUES } = require('../../../constants');
-const {
+} from '@rudderstack/integrations-lib';
+import validator from 'validator';
+import { MappedToDestinationKey, GENERIC_TRUE_VALUES } from '../../../constants';
+import {
defaultPostRequestConfig,
defaultRequestConfig,
defaultPatchRequestConfig,
@@ -21,14 +21,15 @@ const {
getDestinationExternalIDInfoForRetl,
getDestinationExternalIDObjectForRetl,
sortBatchesByMinJobId,
-} = require('../../util');
-const stats = require('../../../util/stats');
-const {
+} from '../../util';
+import stats from '../../../util/stats';
+import {
IDENTIFY_CRM_UPDATE_CONTACT,
IDENTIFY_CRM_CREATE_NEW_CONTACT,
MAX_BATCH_SIZE_CRM_CONTACT,
BATCH_IDENTIFY_CRM_CREATE_NEW_CONTACT,
BATCH_IDENTIFY_CRM_UPDATE_CONTACT,
+ BATCH_IDENTIFY_CRM_UPSERT_CONTACT,
mappingConfig,
ConfigCategory,
TRACK_CRM_ENDPOINT,
@@ -37,8 +38,8 @@ const {
CRM_ASSOCIATION_V3,
RETL_CREATE_ASSOCIATION_OPERATION,
RETL_SOURCE,
-} = require('./config');
-const {
+} from './config';
+import {
getTransformedJSON,
searchContacts,
getEventAndPropertiesFromConfig,
@@ -46,22 +47,82 @@ const {
populateTraits,
addExternalIdToHSTraits,
removeHubSpotSystemField,
-} = require('./util');
-const { JSON_MIME_TYPE } = require('../../util/constant');
+ isUpsertEnabled,
+ isLookupFieldUnique,
+ getLookupFieldValue,
+ addHsAuthentication,
+} from './util';
+import { JSON_MIME_TYPE } from '../../util/constant';
+import type { Metadata } from '../../../types';
+import type {
+ HubSpotDestination,
+ HubSpotPropertyMap,
+ HubSpotTrackEventRequest,
+ HubSpotBatchInputItem,
+ HubSpotRouterTransformationOutput,
+ HubspotProcessorTransformationOutput,
+ HubspotRouterRequest,
+ HubSpotBatchProcessingItem,
+ HubspotRudderMessage,
+ HubSpotBatchRequestOutput,
+ HubSpotUpsertPayload,
+} from './types';
+import { hasPropertiesRecord, hasAssociationShape, hasUpsertPayloadShape } from './types';
-const addHsAuthentication = (response, Config) => {
- // choosing API Type
- if (Config.authorizationType === 'newPrivateAppApi') {
- // Private Apps
- response.headers = {
- ...response.headers,
- Authorization: `Bearer ${Config.accessToken}`,
- };
- } else {
- // use legacy API Key
- response.params = { hapikey: Config.apiKey };
+/**
+ * Process identify event for HubSpot V3 Upsert API.
+ * This function builds the upsert payload that will be batched and sent to
+ * /crm/v3/objects/contacts/batch/upsert endpoint.
+ *
+ * Ref - https://developers.hubspot.com/docs/api/crm/contacts#create-or-update-contacts-upsert
+ *
+ * @param {object} param0 - Object containing message, destination, and metadata
+ * @param {object} propertyMap - HubSpot property map for type validation
+ * @returns {object} - Response object with upsert payload
+ */
+const processUpsertIdentify = async (
+ {
+ message,
+ destination,
+ metadata,
+ }: { message: HubspotRudderMessage; destination: HubSpotDestination; metadata: Metadata },
+ propertyMap?: HubSpotPropertyMap,
+): Promise => {
+ const { Config } = destination;
+
+ // Get lookup info for upsert (id and idProperty)
+ const lookupFieldInfo = getLookupFieldValue(message, Config.lookupField!);
+ if (!lookupFieldInfo) {
+ throw new InstrumentationError(
+ `Identify:: lookupField "${Config.lookupField}" value not found in traits. Email fallback also not available.`,
+ );
}
- return response;
+
+ // Build properties payload
+ let properties = await getTransformedJSON({ message, destination, metadata }, propertyMap);
+ properties = removeHubSpotSystemField(properties);
+
+ // Build upsert payload
+ // Ref: https://developers.hubspot.com/docs/api/crm/contacts#create-or-update-contacts-upsert
+ const upsertPayload = {
+ id: lookupFieldInfo.value,
+ idProperty: lookupFieldInfo.fieldName,
+ properties,
+ // objectWriteTraceId is used to correlate results in 207 multi-status responses
+ objectWriteTraceId: metadata?.jobId?.toString(),
+ };
+
+ // Build response
+ const response = defaultRequestConfig();
+ response.method = defaultPostRequestConfig.requestMethod;
+ response.endpoint = BATCH_IDENTIFY_CRM_UPSERT_CONTACT;
+ response.headers = {
+ 'Content-Type': JSON_MIME_TYPE,
+ };
+ response.body.JSON = removeUndefinedAndNullValues(upsertPayload);
+ response.operation = 'upsertContacts';
+
+ return addHsAuthentication(response, Config);
};
/**
@@ -72,9 +133,16 @@ const addHsAuthentication = (response, Config) => {
* @param {*} propertyMap
* @returns
*/
-const processIdentify = async ({ message, destination, metadata }, propertyMap) => {
+const processIdentify = async (
+ {
+ message,
+ destination,
+ metadata,
+ }: { message: HubspotRudderMessage; destination: HubSpotDestination; metadata: Metadata },
+ propertyMap?: HubSpotPropertyMap,
+): Promise<HubspotProcessorTransformationOutput> => {
const { Config } = destination;
- let traits = getFieldValueFromMessage(message, 'traits');
+ let traits: Record<string, any> = getFieldValueFromMessage(message, 'traits');
// since hubspot does not allow invalid emails, we need to
// validate the email before sending it to hubspot
if (traits?.email && !validator.isEmail(traits.email)) {
@@ -83,18 +151,20 @@ const processIdentify = async ({ message, destination, metadata }, propertyMap)
const mappedToDestination = get(message, MappedToDestinationKey);
const operation = get(message, 'context.hubspotOperation');
const externalIdObj = getDestinationExternalIDObjectForRetl(message, 'HS');
- const { objectType } = getDestinationExternalIDInfoForRetl(message, 'HS');
+ const externalIdInfo = getDestinationExternalIDInfoForRetl(message, 'HS');
+ const objectType = externalIdInfo?.objectType;
// build response
- let endpoint;
+ let endpoint: string | undefined;
const response = defaultRequestConfig();
response.method = defaultPostRequestConfig.requestMethod;
// Handle hubspot association events sent from retl source
if (
objectType &&
- objectType.toLowerCase() === 'association' &&
+ String(objectType).toLowerCase() === 'association' &&
mappedToDestination &&
- GENERIC_TRUE_VALUES.includes(mappedToDestination.toString())
+ GENERIC_TRUE_VALUES.includes(mappedToDestination.toString()) &&
+ externalIdObj
) {
const { associationTypeId, fromObjectType, toObjectType } = externalIdObj;
response.endpoint = CRM_ASSOCIATION_V3.replace(':fromObjectType', fromObjectType).replace(
@@ -146,7 +216,17 @@ const processIdentify = async ({ message, destination, metadata }, propertyMap)
let contactId = getDestinationExternalID(message, 'hsContactId');
- // if contactId is not provided then search
+ // We can't use contactId for upsert, as it is a non-unique field.
+ // This skips the searchContacts call and uses the batch upsert endpoint
+ if (
+ !contactId &&
+ isUpsertEnabled(metadata?.workspaceId) &&
+ (await isLookupFieldUnique(destination, Config.lookupField!, metadata))
+ ) {
+ return processUpsertIdentify({ message, destination, metadata }, propertyMap);
+ }
+
+ // Legacy flow: search for contact if contactId is not provided
if (!contactId) {
contactId = await searchContacts(message, destination, metadata);
}
@@ -173,23 +253,12 @@ const processIdentify = async ({ message, destination, metadata }, propertyMap)
response.body.JSON = removeUndefinedAndNullValues(payload);
}
- response.endpoint = endpoint;
+ response.endpoint = endpoint!;
response.headers = {
'Content-Type': JSON_MIME_TYPE,
};
- // choosing API Type
- if (Config.authorizationType === 'newPrivateAppApi') {
- // Private Apps
- response.headers = {
- ...response.headers,
- Authorization: `Bearer ${Config.accessToken}`,
- };
- } else {
- // use legacy API Key
- response.params = { hapikey: Config.apiKey };
- }
- return response;
+ return addHsAuthentication(response, Config);
};
/**
@@ -199,10 +268,16 @@ const processIdentify = async ({ message, destination, metadata }, propertyMap)
* @param {*} destination
* @returns
*/
-const processTrack = async ({ message, destination }) => {
+const processTrack = async ({
+ message,
+ destination,
+}: HubspotRouterRequest): Promise<HubspotProcessorTransformationOutput> => {
const { Config } = destination;
- let payload = constructPayload(message, mappingConfig[ConfigCategory.TRACK.name]);
+ let payload: HubSpotTrackEventRequest = constructPayload(
+ message,
+ mappingConfig[ConfigCategory.TRACK.name],
+ )!;
// fetch event name and its properties from config (webapp) and put it in final payload
payload = getEventAndPropertiesFromConfig(message, destination, payload);
@@ -245,12 +320,16 @@ const processTrack = async ({ message, destination }) => {
return response;
};
-const batchIdentify = (arrayChunksIdentify, batchedResponseList, batchOperation) => {
+const batchIdentify = (
+ arrayChunksIdentify: HubSpotBatchProcessingItem[][],
+ batchedResponseList: HubSpotRouterTransformationOutput[],
+ batchOperation: string,
+): HubSpotRouterTransformationOutput[] => {
// list of chunks [ [..], [..] ]
- const { destinationId } = arrayChunksIdentify[0][0].destination;
+ const destinationId = arrayChunksIdentify[0][0].destination.ID;
arrayChunksIdentify.forEach((chunk) => {
- const identifyResponseList = [];
- const metadata = [];
+ const identifyResponseList: Array<Record<string, unknown>> = [];
+ const metadata: Metadata[] = [];
// add metric for batch size
stats.gauge('hs_batch_size', chunk.length, {
destination_id: destinationId,
@@ -259,14 +338,16 @@ const batchIdentify = (arrayChunksIdentify, batchedResponseList, batchOperation)
// from the first event in a batch
const { message, destination } = chunk[0];
- let batchEventResponse = defaultBatchRequestConfig();
+ let batchEventResponse: HubSpotBatchRequestOutput = defaultBatchRequestConfig();
if (batchOperation === 'createObject') {
batchEventResponse.batchedRequest.endpoint = `${message.endpoint}/batch/create`;
// create operation
chunk.forEach((ev) => {
- identifyResponseList.push({ ...ev.message.body.JSON });
+ identifyResponseList.push({
+ ...ev.message.body.JSON,
+ });
metadata.push(ev.metadata);
});
} else if (batchOperation === 'updateObject') {
@@ -291,18 +372,20 @@ const batchIdentify = (arrayChunksIdentify, batchedResponseList, batchOperation)
// updating the existing one to avoid duplicate
// as same event can fire in batch one of the reason
// can be due to network lag or processor being busy
+ const bodyJSON = ev.message.body.JSON;
+
+ if (!hasPropertiesRecord(bodyJSON)) {
+ throw new TransformationError('rETL - Invalid payload for createContacts batch');
+ }
+
+ const { properties } = bodyJSON;
const isDuplicate = identifyResponseList.find(
- (data) => data.properties.email === ev.message.body.JSON.properties.email,
+ (data) => (data.properties as { email?: string })?.email === properties?.email,
);
- if (isDefinedAndNotNullAndNotEmpty(isDuplicate)) {
- // array is being shallow copied hence changes are affecting the original reference
- // basically rewriting the same value to avoid duplicate entry
- isDuplicate.properties = ev.message.body.JSON.properties;
+ if (isDefinedAndNotNullAndNotEmpty(isDuplicate) && isDuplicate) {
+ isDuplicate.properties = properties;
} else {
- // appending unique events
- identifyResponseList.push({
- properties: ev.message.body.JSON.properties,
- });
+ identifyResponseList.push({ properties });
}
metadata.push(ev.metadata);
});
@@ -312,30 +395,67 @@ const batchIdentify = (arrayChunksIdentify, batchedResponseList, batchOperation)
// update has contactId and properties
// extract contactId from the end of the endpoint
const id = ev.message.endpoint.split('/').pop();
+ const bodyJSON = ev.message.body.JSON;
// duplicate contactId is not allowed in batch
// updating the existing one to avoid duplicate
// as same event can fire in batch one of the reason
// can be due to network lag or processor being busy
const isDuplicate = identifyResponseList.find((data) => data.id === id);
- if (isDefinedAndNotNullAndNotEmpty(isDuplicate)) {
- // rewriting the same value to avoid duplicate entry
- isDuplicate.properties = ev.message.body.JSON.properties;
- } else {
- // appending unique events
- identifyResponseList.push({
- id,
- properties: ev.message.body.JSON.properties,
- });
+ if (hasPropertiesRecord(bodyJSON)) {
+ if (isDefinedAndNotNullAndNotEmpty(isDuplicate)) {
+ // rewriting the same value to avoid duplicate entry
+ isDuplicate!.properties = bodyJSON.properties;
+ } else {
+ // appending unique events
+ identifyResponseList.push({
+ id,
+ properties: bodyJSON.properties,
+ });
+ }
}
metadata.push(ev.metadata);
});
} else if (batchOperation === 'createAssociations') {
chunk.forEach((ev) => {
batchEventResponse.batchedRequest.endpoint = ev.message.endpoint;
+ if (!hasAssociationShape(ev.message.body.JSON)) {
+ throw new TransformationError('rETL - Invalid payload for createAssociations batch');
+ }
identifyResponseList.push(ev.message.body.JSON);
metadata.push(ev.metadata);
});
+ } else if (batchOperation === 'upsertContacts') {
+ // Upsert operation for V3 batch upsert endpoint
+ // Each event already has the complete upsert payload structure
+ // { id, idProperty, properties, objectWriteTraceId }
+ chunk.forEach((ev) => {
+ const json = ev.message.body.JSON;
+
+ if (!hasUpsertPayloadShape(json)) {
+ throw new TransformationError('Invalid payload for upsertContacts batch');
+ }
+ const { id, idProperty, properties } = json;
+
+ // Deduplicate by id (lookup value) - If we don't deduplicate, hubspot will fail the batch upsert request
+ const existing = identifyResponseList.find(
+ (data): data is HubSpotUpsertPayload =>
+ hasUpsertPayloadShape(data) && data.id === id && data.idProperty === idProperty,
+ );
+ if (existing) {
+ // Merge latest properties with existing properties
+ existing.properties = { ...existing.properties, ...properties };
+ // Track duplicate objectWriteTraceId for monitoring
+ stats.increment('hs_upsert_duplicate_trace_id', {
+ destination_id: destinationId,
+ });
+ } else {
+ // Add new entry with full upsert payload
+ identifyResponseList.push(json);
+ }
+ metadata.push(ev.metadata);
+ });
+ batchEventResponse.batchedRequest.endpoint = chunk[0].message.endpoint;
} else {
throw new TransformationError('Unknown hubspot operation', 400);
}
@@ -350,8 +470,8 @@ const batchIdentify = (arrayChunksIdentify, batchedResponseList, batchOperation)
batchEventResponse.batchedRequest.endpoint = BATCH_IDENTIFY_CRM_UPDATE_CONTACT;
}
- batchEventResponse.batchedRequest.headers = message.headers;
- batchEventResponse.batchedRequest.params = message.params;
+ batchEventResponse.batchedRequest.headers = message.headers!;
+ batchEventResponse.batchedRequest.params = message.params!;
batchEventResponse = {
...batchEventResponse,
@@ -370,18 +490,22 @@ const batchIdentify = (arrayChunksIdentify, batchedResponseList, batchOperation)
return batchedResponseList;
};
-const batchEvents = (destEvents) => {
- let batchedResponseList = [];
- const trackResponseList = [];
- // create contact chunck
- const createContactEventsChunk = [];
+const batchEvents = (
+ destEvents: HubSpotBatchProcessingItem[],
+): HubSpotRouterTransformationOutput[] => {
+ let batchedResponseList: HubSpotRouterTransformationOutput[] = [];
+ const trackResponseList: HubSpotRouterTransformationOutput[] = [];
+ // create contact chunk
+ const createContactEventsChunk: HubSpotBatchProcessingItem[] = [];
// update contact chunk
- const updateContactEventsChunk = [];
+ const updateContactEventsChunk: HubSpotBatchProcessingItem[] = [];
+ // upsert contact chunk (V3 batch upsert)
+ const upsertContactEventsChunk: HubSpotBatchProcessingItem[] = [];
// rETL specific chunk
- const createAllObjectsEventChunk = [];
- const updateAllObjectsEventChunk = [];
- const associationObjectsEventChunk = [];
- let maxBatchSize;
+ const createAllObjectsEventChunk: HubSpotBatchProcessingItem[] = [];
+ const updateAllObjectsEventChunk: HubSpotBatchProcessingItem[] = [];
+ const associationObjectsEventChunk: HubSpotBatchProcessingItem[] = [];
+ let maxBatchSize: number = MAX_BATCH_SIZE_CRM_OBJECT;
destEvents.forEach((event) => {
// handler for track call
@@ -391,11 +515,11 @@ const batchEvents = (destEvents) => {
const { message, metadata, destination } = event;
const endpoint = get(message, 'endpoint');
- const batchedResponse = defaultBatchRequestConfig();
- batchedResponse.batchedRequest.headers = message.headers;
+ const batchedResponse: HubSpotBatchRequestOutput = defaultBatchRequestConfig();
+ batchedResponse.batchedRequest.headers = message.headers!;
batchedResponse.batchedRequest.endpoint = endpoint;
batchedResponse.batchedRequest.body = message.body;
- batchedResponse.batchedRequest.params = message.params;
+ batchedResponse.batchedRequest.params = message.params!;
batchedResponse.batchedRequest.method = defaultPostRequestConfig.requestMethod;
batchedResponse.metadata = [metadata];
batchedResponse.destination = destination;
@@ -430,6 +554,9 @@ const batchEvents = (destEvents) => {
} else if (operation === 'updateContacts') {
// Identify: making chunks for CRM update contact endpoint
updateContactEventsChunk.push(event);
+ } else if (operation === 'upsertContacts') {
+ // Identify: making chunks for CRM upsert contact endpoint (V3 batch upsert)
+ upsertContactEventsChunk.push(event);
} else {
throw new TransformationError('rETL - Not a valid operation');
}
@@ -451,6 +578,12 @@ const batchEvents = (destEvents) => {
MAX_BATCH_SIZE_CRM_CONTACT,
);
+ // CRM upsert contact endpoint chunks (V3 batch upsert)
+ const arrayChunksIdentifyUpsertContact = lodash.chunk(
+ upsertContactEventsChunk,
+ MAX_BATCH_SIZE_CRM_CONTACT,
+ );
+
const arrayChunksIdentifyCreateAssociations = lodash.chunk(
associationObjectsEventChunk,
MAX_BATCH_SIZE_CRM_OBJECT,
@@ -492,6 +625,15 @@ const batchEvents = (destEvents) => {
);
}
+ // batching up 'upsert' contact endpoint chunks (V3 batch upsert)
+ if (arrayChunksIdentifyUpsertContact.length > 0) {
+ batchedResponseList = batchIdentify(
+ arrayChunksIdentifyUpsertContact,
+ batchedResponseList,
+ 'upsertContacts',
+ );
+ }
+
// batching association events
if (arrayChunksIdentifyCreateAssociations.length > 0) {
batchedResponseList = batchIdentify(
@@ -504,4 +646,4 @@ const batchEvents = (destEvents) => {
return sortBatchesByMinJobId(batchedResponseList.concat(trackResponseList));
};
-module.exports = { processIdentify, processTrack, batchEvents };
+export { processIdentify, processTrack, batchEvents };
diff --git a/src/v0/destinations/hs/config.js b/src/v0/destinations/hs/config.ts
similarity index 84%
rename from src/v0/destinations/hs/config.js
rename to src/v0/destinations/hs/config.ts
index 388a20fb7a1..e91cb9e8df9 100644
--- a/src/v0/destinations/hs/config.js
+++ b/src/v0/destinations/hs/config.ts
@@ -1,9 +1,11 @@
-const { getMappingConfig } = require('../../util');
+import { getMappingConfig } from '../../util';
const BASE_ENDPOINT = 'https://api.hubapi.com';
// For fetching properties from HubSpot
const CONTACT_PROPERTY_MAP_ENDPOINT = `${BASE_ENDPOINT}/properties/v1/contacts/properties`;
+// Ref - https://developers.hubspot.com/docs/api-reference/crm-properties-v3/core/get-crm-v3-properties-objectType
+const CRM_V3_CONTACT_PROPERTIES_ENDPOINT = `${BASE_ENDPOINT}/crm/v3/properties/contacts`;
/*
* Legacy API
@@ -36,6 +38,8 @@ const IDENTIFY_CRM_UPDATE_CONTACT = `${BASE_ENDPOINT}/crm/v3/objects/contacts/:c
// Identify Batch
const BATCH_IDENTIFY_CRM_CREATE_NEW_CONTACT = `${BASE_ENDPOINT}/crm/v3/objects/contacts/batch/create`;
const BATCH_IDENTIFY_CRM_UPDATE_CONTACT = `${BASE_ENDPOINT}/crm/v3/objects/contacts/batch/update`;
+// Ref - https://developers.hubspot.com/docs/api/crm/contacts#create-or-update-contacts-upsert
+const BATCH_IDENTIFY_CRM_UPSERT_CONTACT = `${BASE_ENDPOINT}/crm/v3/objects/contacts/batch/upsert`;
// Ref - https://developers.hubspot.com/docs/api/crm/contacts#endpoint?spec=GET-/crm/v3/objects/contacts
const MAX_BATCH_SIZE_CRM_CONTACT = 100;
@@ -84,16 +88,20 @@ const RETL_SOURCE = 'rETL';
const mappingConfig = getMappingConfig(ConfigCategory, __dirname);
const hsCommonConfigJson = mappingConfig[ConfigCategory.COMMON.name];
-const primaryToSecondaryFields = {
+const primaryToSecondaryFields: Record<string, string> = {
email: 'hs_additional_emails',
};
// list of fields that hubspot maintains for each record of object
const HUBSPOT_SYSTEM_FIELDS = ['hs_object_id'];
-module.exports = {
+const DESTINATION = 'HS';
+const CONTACT_PROPERTIES_CACHE_TTL = 60 * 60 * 24; // 24 hours
+
+export {
BASE_ENDPOINT,
CONTACT_PROPERTY_MAP_ENDPOINT,
+ CRM_V3_CONTACT_PROPERTIES_ENDPOINT,
TRACK_ENDPOINT,
IDENTIFY_CREATE_UPDATE_CONTACT,
IDENTIFY_CREATE_NEW_CONTACT,
@@ -105,6 +113,7 @@ module.exports = {
IDENTIFY_CRM_UPDATE_CONTACT,
BATCH_IDENTIFY_CRM_CREATE_NEW_CONTACT,
BATCH_IDENTIFY_CRM_UPDATE_CONTACT,
+ BATCH_IDENTIFY_CRM_UPSERT_CONTACT,
MAX_BATCH_SIZE_CRM_CONTACT,
TRACK_CRM_ENDPOINT,
CRM_CREATE_UPDATE_ALL_OBJECTS,
@@ -120,6 +129,7 @@ module.exports = {
RETL_CREATE_ASSOCIATION_OPERATION,
MAX_CONTACTS_PER_REQUEST,
primaryToSecondaryFields,
- DESTINATION: 'HS',
+ DESTINATION,
HUBSPOT_SYSTEM_FIELDS,
+ CONTACT_PROPERTIES_CACHE_TTL,
};
diff --git a/src/v0/destinations/hs/transform.js b/src/v0/destinations/hs/transform.ts
similarity index 65%
rename from src/v0/destinations/hs/transform.js
rename to src/v0/destinations/hs/transform.ts
index 68747bae6d8..83bdd106d5c 100644
--- a/src/v0/destinations/hs/transform.js
+++ b/src/v0/destinations/hs/transform.ts
@@ -1,28 +1,36 @@
-const get = require('get-value');
-const { InstrumentationError } = require('@rudderstack/integrations-lib');
-const { EventType } = require('../../../constants');
-const {
+import get from 'get-value';
+import { InstrumentationError } from '@rudderstack/integrations-lib';
+import { EventType, MappedToDestinationKey, GENERIC_TRUE_VALUES } from '../../../constants';
+import {
handleRtTfSingleEventError,
getDestinationExternalIDInfoForRetl,
- groupEventsByType: batchEventsInOrder,
-} = require('../../util');
-const { API_VERSION } = require('./config');
-const {
- processLegacyIdentify,
- processLegacyTrack,
- legacyBatchEvents,
-} = require('./HSTransform-v1');
-const { MappedToDestinationKey, GENERIC_TRUE_VALUES } = require('../../../constants');
-const { processIdentify, processTrack, batchEvents } = require('./HSTransform-v2');
-const {
+ groupEventsByType,
+} from '../../util';
+import { API_VERSION } from './config';
+import { processLegacyIdentify, processLegacyTrack, legacyBatchEvents } from './HSTransform-v1';
+import { processIdentify, processTrack, batchEvents } from './HSTransform-v2';
+import {
splitEventsForCreateUpdate,
fetchFinalSetOfTraits,
getProperties,
validateDestinationConfig,
convertToResponseFormat,
-} = require('./util');
+} from './util';
+import type {
+ HubSpotPropertyMap,
+ HubSpotBatchRouterResult,
+ HubSpotRouterTransformationOutput,
+ HubspotRouterRequest,
+ HubspotProcessorTransformationOutput,
+ HubspotProcessorRequest,
+ HubSpotBatchProcessingItem,
+} from './types';
+import { isProcessorOutput } from './types';
-const processSingleMessage = async ({ message, destination, metadata }, propertyMap) => {
+const processSingleMessage = async (
+ { message, destination, metadata }: HubspotRouterRequest,
+ propertyMap?: HubSpotPropertyMap,
+): Promise<HubspotProcessorTransformationOutput | HubspotProcessorTransformationOutput[]> => {
if (!message.type) {
throw new InstrumentationError('Message type is not present. Aborting message.');
}
@@ -30,7 +38,7 @@ const processSingleMessage = async ({ message, destination, metadata }, property
// Config Validation
validateDestinationConfig(destination);
- let response;
+ let response: HubspotProcessorTransformationOutput | HubspotProcessorTransformationOutput[];
switch (message.type) {
case EventType.IDENTIFY: {
response = [];
@@ -44,7 +52,7 @@ const processSingleMessage = async ({ message, destination, metadata }, property
}
case EventType.TRACK:
if (destination.Config.apiVersion === API_VERSION.v3) {
- response = await processTrack({ message, destination }, propertyMap);
+ response = await processTrack({ message, destination, metadata });
} else {
response = await processLegacyTrack({ message, destination, metadata }, propertyMap);
}
@@ -57,36 +65,42 @@ const processSingleMessage = async ({ message, destination, metadata }, property
};
// has been deprecated - using routerTransform for both the versions
-const process = async (event) => {
+const process = async (
+ event: HubspotProcessorRequest,
+): Promise<HubspotProcessorTransformationOutput | HubspotProcessorTransformationOutput[]> => {
const { destination, message, metadata } = event;
const mappedToDestination = get(message, MappedToDestinationKey);
- let events = [];
- events = [event];
+ let events: HubspotProcessorRequest[] = [event];
if (mappedToDestination && GENERIC_TRUE_VALUES.includes(mappedToDestination?.toString())) {
// get info about existing objects and splitting accordingly.
- events = await splitEventsForCreateUpdate([event], destination, metadata);
+ events = await splitEventsForCreateUpdate(events, destination, metadata);
}
return processSingleMessage({
message: events[0].message,
- destination: events[0].destination,
- metadata: events[0].metadata || metadata,
+ destination,
+ metadata,
});
};
-const processBatchRouter = async (inputs, reqMetadata) => {
+
+const processBatchRouter = async (
+ inputs: HubspotRouterRequest[],
+ reqMetadata: NonNullable,
+): Promise<HubSpotBatchRouterResult> => {
let tempInputs = inputs;
// using the first destination config for transforming the batch
const { destination, metadata } = tempInputs[0];
- let propertyMap;
+ let propertyMap: HubSpotPropertyMap | undefined;
const mappedToDestination = get(tempInputs[0].message, MappedToDestinationKey);
- const { objectType } = getDestinationExternalIDInfoForRetl(tempInputs[0].message, 'HS');
- const successRespList = [];
- const errorRespList = [];
+ const externalIdInfo = getDestinationExternalIDInfoForRetl(tempInputs[0].message, 'HS');
+ const objectType = externalIdInfo?.objectType;
+ const successRespList: HubSpotBatchProcessingItem[] = [];
+ const errorRespList: HubSpotRouterTransformationOutput[] = [];
// batch implementation
- let batchedResponseList = [];
+ let batchedResponseList: HubSpotRouterTransformationOutput[] = [];
try {
if (mappedToDestination && GENERIC_TRUE_VALUES.includes(mappedToDestination?.toString())) {
// skip splitting the batches to inserts and updates if object it is an association
- if (objectType?.toLowerCase() !== 'association') {
+ if (!objectType || String(objectType).toLowerCase() !== 'association') {
propertyMap = await getProperties(destination, metadata);
// get info about existing objects and splitting accordingly.
tempInputs = await splitEventsForCreateUpdate(tempInputs, destination, metadata);
@@ -100,7 +114,7 @@ const processBatchRouter = async (inputs, reqMetadata) => {
propertyMap = await getProperties(destination, metadata);
}
}
- } catch (error) {
+ } catch (error: unknown) {
// Any error thrown from the above try block applies to all the events
return {
batchedResponseList,
@@ -114,7 +128,7 @@ const processBatchRouter = async (inputs, reqMetadata) => {
await Promise.all(
inputs.map(async (input) => {
try {
- if (input.message.statusCode) {
+ if (input.message.statusCode && isProcessorOutput(input.message)) {
// already transformed event
successRespList.push({
message: input.message,
@@ -142,15 +156,15 @@ const processBatchRouter = async (inputs, reqMetadata) => {
});
});
}
- } catch (error) {
+ } catch (error: unknown) {
const errRespEvent = handleRtTfSingleEventError(input, error, reqMetadata);
errorRespList.push(errRespEvent);
}
}),
);
- const dontBatchTrueResponses = [];
- const dontBatchFalseOrUndefinedResponses = [];
+ const dontBatchTrueResponses: HubSpotBatchProcessingItem[] = [];
+ const dontBatchFalseOrUndefinedResponses: HubSpotBatchProcessingItem[] = [];
// segregating successRepList depending on dontbatch value
successRespList.forEach((successResp) => {
if (successResp.metadata?.dontBatch) {
@@ -175,12 +189,16 @@ const processBatchRouter = async (inputs, reqMetadata) => {
dontBatchEvents: convertToResponseFormat(dontBatchTrueResponses),
};
};
+
// we are batching by default at routerTransform
-const processRouterDest = async (inputs, reqMetadata) => {
- const tempNewInputs = batchEventsInOrder(inputs);
- const batchedResponseList = [];
- const errorRespList = [];
- const dontBatchEvents = [];
+const processRouterDest = async (
+ inputs: HubspotRouterRequest[],
+ reqMetadata: NonNullable,
+): Promise<HubSpotRouterTransformationOutput[]> => {
+ const tempNewInputs: HubspotRouterRequest[][] = groupEventsByType(inputs);
+ const batchedResponseList: HubSpotRouterTransformationOutput[] = [];
+ const errorRespList: HubSpotRouterTransformationOutput[] = [];
+ const dontBatchEvents: HubSpotRouterTransformationOutput[] = [];
const promises = tempNewInputs.map(async (inputEvents) => {
const response = await processBatchRouter(inputEvents, reqMetadata);
return response;
@@ -196,4 +214,4 @@ const processRouterDest = async (inputs, reqMetadata) => {
return [...batchedResponseList, ...errorRespList, ...dontBatchEvents];
};
-module.exports = { process, processRouterDest };
+export { process, processRouterDest };
diff --git a/src/v0/destinations/hs/types.ts b/src/v0/destinations/hs/types.ts
new file mode 100644
index 00000000000..ad31bdb8c9c
--- /dev/null
+++ b/src/v0/destinations/hs/types.ts
@@ -0,0 +1,449 @@
+import type { Destination, Metadata, RudderMessage } from '../../../types';
+import type {
+ RouterTransformationResponse,
+ BatchedRequestBody,
+ RouterTransformationRequestData,
+ ProcessorTransformationOutput,
+ ProcessorTransformationRequest,
+ BatchedRequest,
+} from '../../../types/destinationTransformation';
+
+// ============================================================================
+// Destination Configuration Types
+// ============================================================================
+
+/**
+ * HubSpot Destination Configuration
+ * Ref: https://developers.hubspot.com/docs/api/crm/contacts
+ */
+export interface HubSpotDestinationConfig {
+ authorizationType: 'newPrivateAppApi' | 'legacyApiKey';
+ accessToken?: string;
+ apiKey?: string;
+ hubID?: string;
+ apiVersion?: 'legacyApi' | 'newApi';
+ lookupField?: string;
+ hubspotEvents?: HubSpotEventMapping[];
+}
+
+/**
+ * HubSpot Event Mapping from webapp config
+ */
+export interface HubSpotEventMapping {
+ rsEventName?: string;
+ hubspotEventName?: string;
+ eventProperties?: { from: string; to: string }[];
+}
+
+/**
+ * Typed Destination for HubSpot
+ */
+export type HubSpotDestination = Destination<HubSpotDestinationConfig>;
+
+// ============================================================================
+// Property Types
+// ============================================================================
+
+/**
+ * HubSpot Property Map - maps property names to their types
+ */
+export type HubSpotPropertyMap = Record<string, string>;
+
+/**
+ * HubSpot Property from API response
+ * Ref: https://developers.hubspot.com/docs/api/crm/properties
+ */
+export interface HubSpotProperty {
+ name: string;
+ type: string;
+}
+
+/**
+ * HubSpot Lookup Field Info
+ */
+export interface HubSpotLookupFieldInfo {
+ fieldName: string;
+ value: unknown;
+}
+
+// ============================================================================
+// External ID Types (for rETL)
+// ============================================================================
+
+/**
+ * HubSpot External ID Info for rETL
+ */
+export interface HubSpotExternalIdInfo {
+ destinationExternalId: string | null;
+ objectType: string | null;
+ identifierType: string | null;
+}
+
+/**
+ * HubSpot External ID Object for rETL (association)
+ */
+export interface HubSpotExternalIdObject {
+ id?: string | number;
+ type?: string;
+ identifierType?: string;
+ associationTypeId?: string;
+ fromObjectType?: string;
+ toObjectType?: string;
+ hsSearchId?: string;
+ useSecondaryObject?: boolean;
+}
+
+/**
+ * HubSpot Contact Record for search results
+ */
+export interface HubSpotContactRecord {
+ id: string;
+ property: string;
+}
+
+// ============================================================================
+// API Request Body Types (for body.JSON)
+// ============================================================================
+
+/**
+ * HubSpot Identify Payload (Legacy API format)
+ * Ref: https://legacydocs.hubspot.com/docs/methods/contacts/create_contact
+ */
+export interface HubSpotLegacyIdentifyProperty {
+ property: string;
+ value: unknown;
+}
+
+/**
+ * Legacy API Identify Request Body
+ * Used in body.JSON for legacy identify calls
+ */
+export interface HubSpotLegacyIdentifyPayload {
+ properties: HubSpotLegacyIdentifyProperty[];
+}
+
+/**
+ * New API Identify Request Body (single contact)
+ * Ref: https://developers.hubspot.com/docs/api/crm/contacts
+ */
+export interface HubSpotIdentifyPayload {
+ properties: Record<string, unknown>;
+}
+
+/**
+ * Batch Input Item for CRM API
+ * Ref: https://developers.hubspot.com/docs/api/crm/contacts
+ */
+export interface HubSpotBatchInputItem {
+ id?: string;
+ properties: Record<string, unknown>;
+}
+
+/**
+ * Batch Request Body (for body.JSON in batch operations)
+ * Ref: https://developers.hubspot.com/docs/api/crm/contacts
+ */
+export interface HubSpotBatchPayload {
+ inputs: HubSpotBatchInputItem[];
+}
+
+/**
+ * Track Event Request Body (New API v3)
+ * Ref: https://developers.hubspot.com/docs/api/analytics/events
+ */
+export interface HubSpotTrackEventRequest {
+ eventName?: string;
+ email?: string;
+ utk?: string;
+ objectId?: string;
+ occurredAt?: string;
+ properties?: Record<string, unknown>;
+}
+
+/**
+ * Legacy Track Event Params (query params)
+ * Ref: https://legacydocs.hubspot.com/docs/methods/enterprise_events/http_api
+ */
+export interface HubSpotLegacyTrackParams {
+ _a: string;
+ _n: string;
+ _m?: number | string;
+ id?: string;
+ email?: string;
+ [key: string]: unknown;
+}
+
+/**
+ * Association Request Body
+ * Ref: https://developers.hubspot.com/docs/api/crm/associations
+ */
+export interface HubSpotAssociationPayload {
+ from?: { id: string };
+ to?: { id: string };
+ type?: string;
+ [key: string]: unknown;
+}
+
+// ============================================================================
+// Search API Types
+// ============================================================================
+
+/**
+ * HubSpot Search API Request Body
+ * Ref: https://developers.hubspot.com/docs/api/crm/search
+ */
+export interface HubSpotSearchRequest {
+ filterGroups: {
+ filters: {
+ propertyName: string;
+ operator: string;
+ value?: unknown;
+ values?: string[];
+ }[];
+ }[];
+ properties?: string[];
+ sorts?: string[];
+ limit?: number;
+ after?: number;
+}
+
+/**
+ * HubSpot Search API Result Item
+ */
+export interface HubSpotSearchResult {
+ id: string;
+ properties: Record<string, string>;
+ createdAt?: string;
+ updatedAt?: string;
+ archived?: boolean;
+}
+
+/**
+ * HubSpot Search API Response
+ */
+export interface HubSpotSearchResponse {
+ total?: number;
+ results?: HubSpotSearchResult[];
+ paging?: {
+ next?: {
+ after?: string | number;
+ link?: string;
+ };
+ };
+}
+
+export interface HubSpotUpsertPayload {
+ id: string;
+ idProperty: string;
+ properties: Record<string, unknown>;
+ objectWriteTraceId?: string;
+}
+
+// ============================================================================
+// Transformer Internal Types
+// ============================================================================
+
+/**
+ * Union of all possible body.JSON payloads for HubSpot
+ */
+export type HubSpotRequestBodyJSON =
+ | HubSpotIdentifyPayload
+ | HubSpotIdentifyPayload[]
+ | HubSpotLegacyIdentifyPayload
+ | HubSpotLegacyIdentifyPayload[]
+ | HubSpotBatchPayload
+ | HubSpotBatchPayload[]
+ | HubSpotTrackEventRequest
+ | HubSpotTrackEventRequest[]
+ | HubSpotAssociationPayload
+ | HubSpotAssociationPayload[]
+ | HubSpotUpsertPayload
+ | HubSpotUpsertPayload[];
+
+/**
+ * HubSpot specific BatchedRequestBody with typed JSON
+ */
+export type HubSpotBatchedRequestBody = BatchedRequestBody<HubSpotRequestBodyJSON>;
+export interface HubSpotBatchRequestOutput {
+ batchedRequest: BatchedRequest<
+ HubSpotRequestBodyJSON,
+ Record<string, string>, // headers
+ Record<string, unknown> // params
+ >;
+ // These are the only fields we actually set before passing to getSuccessRespEvents
+ metadata?: Partial[];
+ destination?: HubSpotDestination;
+}
+export interface HubspotRudderMessage extends Omit<RudderMessage, 'context' | 'event'> {
+ context: RudderMessage['context'] & {
+ externalId: HubSpotExternalIdObject[];
+ hubspotOperation: 'createObject' | 'updateObject';
+ };
+ event: string;
+}
+
+/**
+ * Router input where message may be raw (HubspotRudderMessage) or already transformed (statusCode set)
+ */
+export type HubspotRouterInput =
+ | { message: HubspotRudderMessage; metadata: Metadata; destination: HubSpotDestination }
+ | {
+ message: HubspotProcessorTransformationOutput;
+ metadata: Metadata;
+ destination: HubSpotDestination;
+ };
+
+/**
+ * Type guard: message has already been transformed (processor output shape)
+ */
+export function isProcessorOutput(
+ msg: HubspotRudderMessage | HubspotProcessorTransformationOutput,
+): msg is HubspotProcessorTransformationOutput {
+ return (
+ typeof msg === 'object' &&
+ msg !== null &&
+ 'statusCode' in msg &&
+ 'body' in msg &&
+ typeof (msg as Record).statusCode === 'number' &&
+ (msg as Record).body !== undefined
+ );
+}
+
+/**
+ * Type guard: JSON payload has properties as Record (not array) - for create/update contact
+ */
+export function hasPropertiesRecord(
+ json: unknown,
+): json is { properties: Record } {
+ if (!json || Array.isArray(json)) return false;
+ const obj = json as Record;
+ return (
+ 'properties' in obj &&
+ obj.properties !== null &&
+ typeof obj.properties === 'object' &&
+ !Array.isArray(obj.properties)
+ );
+}
+
+/**
+ * Type guard: JSON payload is association shape (from, to, type)
+ */
+export function hasAssociationShape(
+ json: unknown,
+): json is { from: { id: string }; to: { id: string }; type: string } {
+ if (!json || Array.isArray(json)) return false;
+ const obj = json as Record;
+ return 'from' in obj && 'to' in obj && 'type' in obj;
+}
+
+/**
+ * Type guard: JSON payload is HubSpotUpsertPayload shape
+ */
+export function hasUpsertPayloadShape(json: unknown): json is HubSpotUpsertPayload {
+ if (!json || Array.isArray(json)) return false;
+ const obj = json as Record;
+ return (
+ typeof obj.id === 'string' &&
+ typeof obj.idProperty === 'string' &&
+ obj.properties !== undefined &&
+ obj.properties !== null &&
+ typeof obj.properties === 'object' &&
+ !Array.isArray(obj.properties)
+ );
+}
+
+/**
+ * Type guard: value is valid for date conversion
+ */
+export function isDateLike(value: unknown): value is string | number | Date {
+ return typeof value === 'string' || typeof value === 'number' || value instanceof Date;
+}
+
+/**
+ * Type guard: valid record (object, not array, not null)
+ */
+export function isRecord(value: unknown): value is Record {
+ return value !== null && typeof value === 'object' && !Array.isArray(value);
+}
+
+/**
+ * Type guard: value is HubSpotExternalIdInfo shape
+ */
+export function isHubSpotExternalIdInfo(value: unknown): value is HubSpotExternalIdInfo {
+ return (
+ isRecord(value) &&
+ 'destinationExternalId' in value &&
+ 'objectType' in value &&
+ 'identifierType' in value
+ );
+}
+
+/**
+ * Type guard: value is HubSpotSearchResponse shape
+ */
+export function isHubSpotSearchResponse(value: unknown): value is HubSpotSearchResponse {
+ return isRecord(value) && ('results' in value || 'total' in value || 'paging' in value);
+}
+
+/**
+ * HubSpot Transformed Message (internal)
+ */
+export type HubspotProcessorRequest = ProcessorTransformationRequest<
+ HubspotRudderMessage,
+ Metadata,
+ HubSpotDestination,
+ undefined
+>;
+
+export type HubspotRouterRequest = RouterTransformationRequestData<
+ HubspotRudderMessage,
+ HubSpotDestination,
+ undefined,
+ Metadata
+>;
+
+export interface HubspotProcessorTransformationOutput
+ extends Omit {
+ body: HubSpotBatchedRequestBody;
+ operation?:
+ | 'createObject'
+ | 'updateObject'
+ | 'createContacts'
+ | 'updateContacts'
+ | 'upsertContacts'
+ | 'createAssociation';
+ messageType?: 'track' | 'identify';
+ source?: string;
+ id?: string;
+}
+
+export type HubSpotBatchProcessingItem = {
+ message: HubspotProcessorTransformationOutput;
+ metadata: Metadata;
+ destination: HubSpotDestination;
+};
+
+/**
+ * HubSpot Router Transformation Response (typed version)
+ */
+export interface HubSpotRouterTransformationOutput
+ extends Omit {
+ destination: HubSpotDestination;
+ batchedRequest?: HubspotProcessorTransformationOutput | HubspotProcessorTransformationOutput[];
+ metadata: Metadata[] | Partial[];
+}
+
+export interface HubSpotBatchRouterResult {
+ batchedResponseList: HubSpotRouterTransformationOutput[];
+ errorRespList: HubSpotRouterTransformationOutput[];
+ dontBatchEvents: HubSpotRouterTransformationOutput[];
+}
+
+export type HubSpotPropertyV3 = {
+ name: string;
+ hasUniqueValue?: boolean;
+};
+
+export type HubSpotPropertiesV3Response = {
+ results?: HubSpotPropertyV3[];
+};
diff --git a/src/v0/destinations/hs/util.test.js b/src/v0/destinations/hs/util.test.ts
similarity index 54%
rename from src/v0/destinations/hs/util.test.js
rename to src/v0/destinations/hs/util.test.ts
index 2053cd7705c..720656ebb7f 100644
--- a/src/v0/destinations/hs/util.test.js
+++ b/src/v0/destinations/hs/util.test.ts
@@ -1,14 +1,28 @@
-const {
+const mockCacheGet = jest.fn();
+const mockCacheSet = jest.fn();
+
+jest.mock('../../../adapters/network');
+jest.mock('../../util/cache', () =>
+ jest.fn().mockImplementation(() => ({
+ get: mockCacheGet,
+ set: mockCacheSet,
+ })),
+);
+
+import {
getRequestData,
extractIDsForSearchAPI,
validatePayloadDataTypes,
getObjectAndIdentifierType,
removeHubSpotSystemField,
- isIterable,
-} = require('./util');
-const { primaryToSecondaryFields } = require('./config');
+ isUpsertEnabled,
+ isLookupFieldUnique,
+} from './util';
+import { primaryToSecondaryFields } from './config';
+import { HubspotRudderMessage } from './types';
+import { httpGET } from '../../../adapters/network';
-const propertyMap = {
+const propertyMap: Record = {
firstName: 'string',
lstName: 'string',
age: 'number',
@@ -43,8 +57,8 @@ describe('Validate payload data types utility function test cases', () => {
try {
const output = validatePayloadDataTypes(propertyMap, 'age', 'Twenty', 'userAge');
expect(output).toEqual('');
- } catch (error) {
- expect(error.message).toEqual(expectedOutput);
+ } catch (error: unknown) {
+ expect((error as Error).message).toEqual(expectedOutput);
}
});
});
@@ -76,7 +90,7 @@ describe('getObjectAndIdentifierType utility test cases', () => {
mappedToDestination: 'true',
},
};
- const result = getObjectAndIdentifierType(firstMessage);
+ const result = getObjectAndIdentifierType(firstMessage as unknown as HubspotRudderMessage);
expect(result).toEqual({ objectType: 'association', identifierType: 'id' });
});
@@ -106,9 +120,9 @@ describe('getObjectAndIdentifierType utility test cases', () => {
},
};
try {
- getObjectAndIdentifierType(firstMessage);
- } catch (err) {
- expect(err.message).toBe('rETL - external Id not found.');
+ getObjectAndIdentifierType(firstMessage as unknown as HubspotRudderMessage);
+ } catch (err: unknown) {
+ expect((err as Error).message).toBe('rETL - external Id not found.');
}
});
});
@@ -188,7 +202,7 @@ describe('extractUniqueValues utility test cases', () => {
},
];
- const result = extractIDsForSearchAPI(inputs);
+ const result = extractIDsForSearchAPI(inputs as unknown as { message: HubspotRudderMessage }[]);
expect(result).toEqual([
'testhubspot2@email.com',
@@ -199,7 +213,7 @@ describe('extractUniqueValues utility test cases', () => {
});
it('Should return an empty array when the input is empty', () => {
- const inputs = [];
+ const inputs: { message: HubspotRudderMessage }[] = [];
const result = extractIDsForSearchAPI(inputs);
expect(result).toEqual([]);
});
@@ -241,24 +255,6 @@ describe('getRequestDataAndRequestOptions utility test cases', () => {
});
});
-describe('isIterable utility test cases', () => {
- it('should return true when the input is an array', () => {
- const input = [1, 2, 3];
- const result = isIterable(input);
- expect(result).toBe(true);
- });
- it('should return false when the input is null', () => {
- const input = null;
- const result = isIterable(input);
- expect(result).toBe(false);
- });
- it('should return false when the input is undefined', () => {
- const input = undefined;
- const result = isIterable(input);
- expect(result).toBe(false);
- });
-});
-
describe('removeHubSpotSystemField utility test cases', () => {
it('should remove HubSpot system fields from the properties', () => {
const properties = {
@@ -303,3 +299,161 @@ describe('removeHubSpotSystemField utility test cases', () => {
expect(result).toEqual(expectedOutput);
});
});
+
+describe('isUpsertEnabled utility test cases', () => {
+ const originalEnv = process.env;
+
+ beforeEach(() => {
+ // Reset environment variables before each test
+ jest.resetModules();
+ process.env = { ...originalEnv };
+ delete process.env.HUBSPOT_UPSERT_ENABLED_WORKSPACES;
+ });
+
+ afterAll(() => {
+ process.env = originalEnv;
+ });
+
+ it('should return true when enabled is ALL', () => {
+ process.env.HUBSPOT_UPSERT_ENABLED_WORKSPACES = 'ALL';
+ const result = isUpsertEnabled('workspace123');
+ expect(result).toBe(true);
+ });
+
+ it('should return true when enabled is all (case insensitive)', () => {
+ process.env.HUBSPOT_UPSERT_ENABLED_WORKSPACES = 'all';
+ const result = isUpsertEnabled('workspace123');
+ expect(result).toBe(true);
+ });
+
+ it('should return true when workspace is in enabled list', () => {
+ process.env.HUBSPOT_UPSERT_ENABLED_WORKSPACES = 'workspace123,workspace456,workspace789';
+ const result = isUpsertEnabled('workspace456');
+ expect(result).toBe(true);
+ });
+
+ it('should return false when workspace is not in enabled list', () => {
+ process.env.HUBSPOT_UPSERT_ENABLED_WORKSPACES = 'workspace123,workspace456';
+ const result = isUpsertEnabled('workspace999');
+ expect(result).toBe(false);
+ });
+
+ it('should return false when enabled workspaces env is not set', () => {
+ const result = isUpsertEnabled('workspace123');
+ expect(result).toBe(false);
+ });
+
+ it('should return false when enabled workspaces env is empty string', () => {
+ process.env.HUBSPOT_UPSERT_ENABLED_WORKSPACES = '';
+ const result = isUpsertEnabled('workspace123');
+ expect(result).toBe(false);
+ });
+});
+
+describe('isLookupFieldUnique utility test cases', () => {
+ const mockDestination = {
+ ID: 'dest-123',
+ Config: {
+ authorizationType: 'newPrivateAppApi' as const,
+ accessToken: 'test-token',
+ },
+ };
+ const mockMetadata = { jobId: 1 };
+
+ const createV3ApiResponse = (properties: Array<{ name: string; hasUniqueValue?: boolean }>) => ({
+ success: true,
+ response: {
+ data: { results: properties },
+ status: 200,
+ headers: {},
+ },
+ });
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ });
+
+ it('should return true when lookup field has hasUniqueValue', async () => {
+ const propertiesMap = { email: true, hs_object_id: true };
+ mockCacheGet.mockResolvedValue(propertiesMap);
+
+ const result = await isLookupFieldUnique(mockDestination as any, 'email', mockMetadata as any);
+
+ expect(result).toBe(true);
+ expect(mockCacheGet).toHaveBeenCalledWith('dest-123');
+ expect(httpGET).not.toHaveBeenCalled();
+ });
+
+ it('should return false when lookup field does not have hasUniqueValue', async () => {
+ const propertiesMap = { email: false, custom_field: false };
+ mockCacheGet.mockResolvedValue(propertiesMap);
+
+ const result = await isLookupFieldUnique(mockDestination as any, 'email', mockMetadata as any);
+
+ expect(result).toBe(false);
+ });
+
+ it('should return false when lookup field is not in cached properties and make API call to fetch the properties', async () => {
+ const propertiesMap = { email: true };
+ mockCacheGet.mockResolvedValueOnce(propertiesMap);
+
+ (httpGET as jest.Mock).mockResolvedValue(
+ createV3ApiResponse([
+ { name: 'email', hasUniqueValue: true },
+ { name: 'new_custom_field', hasUniqueValue: true },
+ ]),
+ );
+
+ const result = await isLookupFieldUnique(
+ mockDestination as any,
+ 'new_custom_field',
+ mockMetadata as any,
+ );
+
+ expect(result).toBe(true);
+ expect(httpGET).toHaveBeenCalled();
+ expect(mockCacheSet).toHaveBeenCalledWith(
+ 'dest-123',
+ expect.objectContaining({ email: true, new_custom_field: true }),
+ );
+ });
+
+ it('should fetch from API on cache miss and cache the result', async () => {
+ mockCacheGet.mockReset();
+ mockCacheGet.mockResolvedValue(undefined);
+
+ (httpGET as jest.Mock).mockReset();
+ (httpGET as jest.Mock).mockResolvedValue(
+ createV3ApiResponse([
+ { name: 'email', hasUniqueValue: true },
+ { name: 'hs_object_id', hasUniqueValue: true },
+ ]),
+ );
+
+ const result = await isLookupFieldUnique(mockDestination as any, 'email', mockMetadata as any);
+
+ expect(result).toBe(true);
+ expect(httpGET).toHaveBeenCalled();
+ expect((httpGET as jest.Mock).mock.calls[0][0]).toContain('/crm/v3/properties/contacts');
+ expect(mockCacheSet).toHaveBeenCalledWith(
+ 'dest-123',
+ expect.objectContaining({ email: true, hs_object_id: true }),
+ );
+ });
+
+ it('should return false when lookup field not found after API fetch', async () => {
+ mockCacheGet.mockResolvedValue(undefined);
+
+ (httpGET as jest.Mock).mockResolvedValue(
+ createV3ApiResponse([{ name: 'email', hasUniqueValue: true }]),
+ );
+
+ const result = await isLookupFieldUnique(
+ mockDestination as any,
+ 'nonexistent_field',
+ mockMetadata as any,
+ );
+
+ expect(result).toBe(false);
+ });
+});
diff --git a/src/v0/destinations/hs/util.js b/src/v0/destinations/hs/util.ts
similarity index 58%
rename from src/v0/destinations/hs/util.js
rename to src/v0/destinations/hs/util.ts
index 21c814cba9f..ca2532648e1 100644
--- a/src/v0/destinations/hs/util.js
+++ b/src/v0/destinations/hs/util.ts
@@ -1,21 +1,19 @@
/* eslint-disable no-await-in-loop */
-const chunk = require('lodash/chunk');
-const omit = require('lodash/omit');
-const set = require('set-value');
-const get = require('get-value');
-const {
+import chunk from 'lodash/chunk';
+import omit from 'lodash/omit';
+import set from 'set-value';
+import get from 'get-value';
+import {
NetworkInstrumentationError,
InstrumentationError,
ConfigurationError,
NetworkError,
isDefinedNotNullNotEmpty,
-} = require('@rudderstack/integrations-lib');
-const { httpGET, httpPOST } = require('../../../adapters/network');
-const {
- processAxiosResponse,
- getDynamicErrorType,
-} = require('../../../adapters/utils/networkUtils');
-const {
+} from '@rudderstack/integrations-lib';
+import { AxiosRequestConfig } from 'axios';
+import { httpGET, httpPOST } from '../../../adapters/network';
+import { processAxiosResponse, getDynamicErrorType } from '../../../adapters/utils/networkUtils';
+import {
getFieldValueFromMessage,
constructPayload,
isEmpty,
@@ -26,9 +24,11 @@ const {
validateEventName,
defaultBatchRequestConfig,
getSuccessRespEvents,
-} = require('../../util');
-const {
+ isHttpStatusSuccess,
+} from '../../util';
+import {
CONTACT_PROPERTY_MAP_ENDPOINT,
+ CRM_V3_CONTACT_PROPERTIES_ENDPOINT,
IDENTIFY_CRM_SEARCH_CONTACT,
IDENTIFY_CRM_SEARCH_ALL_OBJECTS,
SEARCH_LIMIT_VALUE,
@@ -37,16 +37,39 @@ const {
DESTINATION,
MAX_CONTACTS_PER_REQUEST,
HUBSPOT_SYSTEM_FIELDS,
-} = require('./config');
-
-const tags = require('../../util/tags');
-const { JSON_MIME_TYPE } = require('../../util/constant');
+ CONTACT_PROPERTIES_CACHE_TTL,
+} from './config';
+
+import Cache from '../../util/cache';
+import tags from '../../util/tags';
+import { JSON_MIME_TYPE } from '../../util/constant';
+import type { Metadata } from '../../../types';
+import type {
+ HubSpotDestination,
+ HubSpotPropertyMap,
+ HubSpotProperty,
+ HubSpotContactRecord,
+ HubSpotLookupFieldInfo,
+ HubSpotLegacyIdentifyProperty,
+ HubSpotSearchRequest,
+ HubSpotSearchResponse,
+ HubSpotSearchResult,
+ HubSpotRouterTransformationOutput,
+ HubspotRouterRequest,
+ HubspotProcessorTransformationOutput,
+ HubspotRudderMessage,
+ HubSpotExternalIdObject,
+ HubSpotTrackEventRequest,
+ HubSpotPropertyV3,
+ HubSpotPropertiesV3Response,
+} from './types';
+import { isDateLike, isHubSpotExternalIdInfo, isHubSpotSearchResponse } from './types';
/**
* validate destination config and check for existence of data
* @param {*} param0
*/
-const validateDestinationConfig = ({ Config }) => {
+const validateDestinationConfig = ({ Config }: HubSpotDestination): ConfigurationError | void => {
if (Config.authorizationType === 'newPrivateAppApi') {
// NEW API
if (!Config.accessToken) {
@@ -63,12 +86,38 @@ const validateDestinationConfig = ({ Config }) => {
}
};
+/**
+ * Adds HubSpot authentication details (headers/params) to a response-like object.
+ * Works for both Private Apps (access token) and legacy API key auth.
+ */
+const addHsAuthentication = <
+ T extends { headers?: Record; params?: Record },
+>(
+ response: T,
+ Config: HubSpotDestination['Config'],
+): T => {
+ if (Config.authorizationType === 'newPrivateAppApi') {
+ // Private Apps
+ response.headers = {
+ ...(response.headers || {}),
+ Authorization: `Bearer ${Config.accessToken}`,
+ };
+ } else {
+ // Legacy API Key
+ response.params = {
+ ...(response.params || {}),
+ hapikey: Config.apiKey,
+ };
+ }
+ return response;
+};
+
/**
* modify the key inorder to suite with HS constraints
* @param {*} key
* @returns
*/
-const formatKey = (key) => {
+const formatKey = (key: string): string => {
// lowercase and replace spaces and . with _
let modifiedKey = key.toLowerCase();
modifiedKey = modifiedKey.replace(/\s+/g, '_');
@@ -81,7 +130,9 @@ const formatKey = (key) => {
* @param {*} message
* @returns
*/
-const fetchFinalSetOfTraits = (message) => {
+const fetchFinalSetOfTraits = (
+ message: HubspotRudderMessage,
+): Record | undefined => {
// get from traits or properties
let traits = getFieldValueFromMessage(message, 'traits');
if (!traits || Object.keys(traits).length === 0) {
@@ -95,8 +146,11 @@ const fetchFinalSetOfTraits = (message) => {
* @param {*} destination
* @returns
*/
-const getProperties = async (destination, metadata) => {
- let hubspotPropertyMap = {};
+const getProperties = async (
+ destination: HubSpotDestination,
+ metadata: Metadata,
+): Promise => {
+ let hubspotPropertyMap: HubSpotPropertyMap = {};
let hubspotPropertyMapResponse;
const { Config } = destination;
@@ -147,9 +201,9 @@ const getProperties = async (destination, metadata) => {
);
}
- const propertyMap = {};
+ const propertyMap: HubSpotPropertyMap = {};
if (hubspotPropertyMapResponse.response && Array.isArray(hubspotPropertyMapResponse.response)) {
- hubspotPropertyMapResponse.response.forEach((element) => {
+ hubspotPropertyMapResponse.response.forEach((element: HubSpotProperty) => {
propertyMap[element.name] = element.type;
});
}
@@ -165,14 +219,19 @@ const getProperties = async (destination, metadata) => {
* @param {*} value
* @param {*} traitsKey
*/
-const validatePayloadDataTypes = (propertyMap, hsSupportedKey, value, traitsKey) => {
+const validatePayloadDataTypes = (
+ propertyMap: HubSpotPropertyMap,
+ hsSupportedKey: string,
+ value: unknown,
+ traitsKey: string,
+): unknown => {
let propValue = value;
// Hub spot data type validations
if (propertyMap[hsSupportedKey] === 'string' && typeof propValue !== 'string') {
if (typeof propValue === 'object') {
propValue = JSON.stringify(propValue);
} else {
- propValue = propValue.toString();
+ propValue = String(propValue);
}
}
@@ -200,7 +259,7 @@ const validatePayloadDataTypes = (propertyMap, hsSupportedKey, value, traitsKey)
* @param {*} propValue
* @returns
*/
-const getUTCMidnightTimeStampValue = (propValue) => {
+const getUTCMidnightTimeStampValue = (propValue: string | number | Date): number => {
const time = propValue;
const date = new Date(time);
date.setUTCHours(0, 0, 0, 0);
@@ -215,34 +274,41 @@ const getUTCMidnightTimeStampValue = (propValue) => {
* @param {*} propertyMap
* @returns
*/
-const getTransformedJSON = async ({ message, destination, metadata }, propertyMap) => {
- let rawPayload = {};
+const getTransformedJSON = async (
+ {
+ message,
+ destination,
+ metadata,
+ }: { message: HubspotRudderMessage; destination: HubSpotDestination; metadata: Metadata },
+ propertyMap?: HubSpotPropertyMap,
+): Promise> => {
+ let rawPayload: Record = {};
const traits = fetchFinalSetOfTraits(message);
if (traits) {
const traitsKeys = Object.keys(traits);
- if (!propertyMap) {
+ let propMap = propertyMap;
+ if (!propMap) {
// fetch HS properties
- // eslint-disable-next-line no-param-reassign
- propertyMap = await getProperties(destination, metadata);
+ propMap = await getProperties(destination, metadata);
}
- rawPayload = constructPayload(message, hsCommonConfigJson);
+ rawPayload = constructPayload(message, hsCommonConfigJson) as Record;
// if there is any extra/custom property in hubspot, that has not already
// been mapped but exists in the traits, we will include those values to the final payload
traitsKeys.forEach((traitsKey) => {
// lowercase and replace ' ' & '.' with '_'
const hsSupportedKey = formatKey(traitsKey);
- if (!rawPayload[traitsKey] && propertyMap[hsSupportedKey]) {
+ if (!rawPayload[traitsKey] && propMap && propMap[hsSupportedKey]) {
// HS accepts empty string to remove the property from contact
// https://community.hubspot.com/t5/APIs-Integrations/Clearing-values-of-custom-properties-in-Hubspot-contact-using/m-p/409156
- let propValue = isNull(traits[traitsKey]) ? '' : traits[traitsKey];
- if (propertyMap[hsSupportedKey] === 'date') {
+ let propValue: unknown = isNull(traits[traitsKey]) ? '' : traits[traitsKey];
+ if (propMap[hsSupportedKey] === 'date' && isDateLike(propValue)) {
propValue = getUTCMidnightTimeStampValue(propValue);
}
rawPayload[hsSupportedKey] = validatePayloadDataTypes(
- propertyMap,
+ propMap,
hsSupportedKey,
propValue,
traitsKey,
@@ -274,7 +340,9 @@ const getTransformedJSON = async ({ message, destination, metadata }, propertyMa
* @param {*} propMap
* @returns
*/
-const formatPropertyValueForIdentify = (propMap) =>
+const formatPropertyValueForIdentify = (
+ propMap: Record,
+): HubSpotLegacyIdentifyProperty[] =>
Object.keys(propMap).map((key) => ({ property: key, value: propMap[key] }));
/**
@@ -283,7 +351,9 @@ const formatPropertyValueForIdentify = (propMap) =>
* @param {*} properties
* @returns
*/
-const getEmailAndUpdatedProps = (properties) => {
+const getEmailAndUpdatedProps = (
+ properties: HubSpotLegacyIdentifyProperty[],
+): { email: unknown; updatedProperties: HubSpotLegacyIdentifyProperty[] } => {
const index = properties.findIndex((prop) => prop.property === 'email');
return {
email: properties[index].value,
@@ -299,7 +369,11 @@ const getEmailAndUpdatedProps = (properties) => {
* @param {*} lookupField destination.Config.lookupField or email
* @returns returns the lookup value
*/
-const getMappingFieldValueFormMessage = (message, sourceKey, lookupField) => {
+const getMappingFieldValueFormMessage = (
+ message: Record,
+ sourceKey: string,
+ lookupField: string | undefined,
+): unknown => {
const baseObject = get(message, `${sourceKey}`);
const lookupValue = baseObject ? baseObject[`${lookupField}`] : null;
return lookupValue;
@@ -312,18 +386,26 @@ const getMappingFieldValueFormMessage = (message, sourceKey, lookupField) => {
* @param {*} lookupField either destination.Config.lookupField or email
* @returns object containing the name of the lookupField and the lookup value
*/
-const getLookupFieldValue = (message, lookupField) => {
+const getLookupFieldValue = (
+ message: Record,
+ lookupField: string | undefined,
+): HubSpotLookupFieldInfo | null => {
const SOURCE_KEYS = ['traits', 'context.traits', 'properties'];
- let value = getValueFromMessage(message, `${lookupField}`);
- if (!value) {
- // Check in free-flowing object level
- SOURCE_KEYS.some((sourceKey) => {
- value = getMappingFieldValueFormMessage(message, sourceKey, lookupField);
- return !!value;
- });
+ const lookUpFields = [lookupField, 'email'];
+ for (const lookUpField of lookUpFields) {
+ let value = getValueFromMessage(message, lookUpField);
+ if (!isDefinedNotNullNotEmpty(value)) {
+ // Check in free-flowing object level
+ SOURCE_KEYS.some((sourceKey) => {
+ value = getMappingFieldValueFormMessage(message, sourceKey, lookUpField);
+ return isDefinedNotNullNotEmpty(value);
+ });
+ }
+ if (isDefinedNotNullNotEmpty(value)) {
+ return { fieldName: lookUpField!, value };
+ }
}
- const lookupValueInfo = value ? { fieldName: lookupField, value } : null;
- return lookupValueInfo;
+ return null;
};
/**
@@ -332,15 +414,18 @@ const getLookupFieldValue = (message, lookupField) => {
* @param {*} destination
* @returns
*/
-const searchContacts = async (message, destination, metadata) => {
+const searchContacts = async (
+ message: Record,
+ destination: HubSpotDestination,
+ metadata: Metadata,
+): Promise => {
const { Config } = destination;
let searchContactsResponse;
- let contactId;
+ let contactId: string | null;
if (!getFieldValueFromMessage(message, 'traits') && !message.properties) {
throw new InstrumentationError('Identify - Invalid traits value for lookup field');
}
- const lookupFieldInfo =
- getLookupFieldValue(message, Config.lookupField) || getLookupFieldValue(message, 'email');
+ const lookupFieldInfo = getLookupFieldValue(message, Config.lookupField);
if (!lookupFieldInfo?.value) {
throw new InstrumentationError(
'Identify:: email i.e a default lookup field for contact lookup not found in traits',
@@ -437,7 +522,11 @@ const searchContacts = async (message, destination, metadata) => {
* @param {*} payload
* @returns
*/
-const getEventAndPropertiesFromConfig = (message, destination, payload) => {
+const getEventAndPropertiesFromConfig = (
+ message: HubspotRudderMessage,
+ destination: HubSpotDestination,
+ payload: HubSpotTrackEventRequest,
+): HubSpotTrackEventRequest => {
const { hubspotEvents } = destination.Config;
let event = get(message, 'event');
@@ -448,10 +537,10 @@ const getEventAndPropertiesFromConfig = (message, destination, payload) => {
throw new InstrumentationError('Event and property mappings are required for track call');
}
validateEventName(event);
- event = event.trim().toLowerCase();
- let eventName;
- let eventProperties;
- const properties = {};
+ event = String(event).trim().toLowerCase();
+ let eventName: string | undefined;
+ let eventProperties: { from: string; to: string }[] | undefined;
+ const properties: Record = {};
// 1. fetch event name from webapp config
// some will traverse through all the indexes of the array and find the event
@@ -462,7 +551,7 @@ const getEventAndPropertiesFromConfig = (message, destination, payload) => {
hubspotEvent.rsEventName.trim().toLowerCase() === event &&
!isEmpty(hubspotEvent.hubspotEventName)
) {
- eventName = hubspotEvent.hubspotEventName.trim();
+ eventName = hubspotEvent.hubspotEventName?.trim();
eventProperties = hubspotEvent.eventProperties;
return true;
}
@@ -476,18 +565,21 @@ const getEventAndPropertiesFromConfig = (message, destination, payload) => {
}
// 2. fetch event properties from webapp config
- eventProperties = getHashFromArray(eventProperties, ...Array(2), false);
+ eventProperties = getHashFromArray(eventProperties, 'from', 'to', false) as {
+ from: string;
+ to: string;
+ }[];
Object.keys(eventProperties).forEach((key) => {
const value = get(message, `properties.${key}`);
if (isDefinedNotNullNotEmpty(value)) {
- properties[eventProperties[key]] = value;
+ properties[eventProperties?.[key]] = value;
}
});
// eslint-disable-next-line no-param-reassign
- payload = { ...payload, eventName, properties };
- return payload;
+ const result = { ...payload, eventName, properties };
+ return result;
};
/**
@@ -495,11 +587,12 @@ const getEventAndPropertiesFromConfig = (message, destination, payload) => {
* @param {*} firstMessage
* @returns
*/
-const getObjectAndIdentifierType = (firstMessage) => {
- const { objectType, identifierType } = getDestinationExternalIDInfoForRetl(
- firstMessage,
- DESTINATION,
- );
+const getObjectAndIdentifierType = (
+ firstMessage: HubspotRudderMessage,
+): { objectType: string; identifierType: string } => {
+ const rawInfo = getDestinationExternalIDInfoForRetl(firstMessage, DESTINATION);
+ const externalIdInfo = isHubSpotExternalIdInfo(rawInfo) ? rawInfo : null;
+ const { objectType, identifierType } = externalIdInfo || {};
if (!objectType || !identifierType) {
throw new InstrumentationError('rETL - external Id not found.');
}
@@ -511,11 +604,13 @@ const getObjectAndIdentifierType = (firstMessage) => {
* @param {*} inputs
* @returns
*/
-const extractIDsForSearchAPI = (inputs) => {
+const extractIDsForSearchAPI = (inputs: { message: HubspotRudderMessage }[]): string[] => {
const values = inputs.map((input) => {
const { message } = input;
- const { destinationExternalId } = getDestinationExternalIDInfoForRetl(message, DESTINATION);
- return destinationExternalId.toString().toLowerCase();
+ const rawInfo = getDestinationExternalIDInfoForRetl(message, DESTINATION);
+ const externalIdInfo = isHubSpotExternalIdInfo(rawInfo) ? rawInfo : null;
+ const destExternalId = externalIdInfo?.destinationExternalId;
+ return String(destExternalId ?? '').toLowerCase();
});
return Array.from(new Set(values));
@@ -532,15 +627,15 @@ const extractIDsForSearchAPI = (inputs) => {
* @returns
*/
const performHubSpotSearch = async (
- reqdata,
- reqOptions,
- objectType,
- identifierType,
- destination,
- metadata,
-) => {
- let checkAfter = 1;
- const searchResults = [];
+ reqdata: HubSpotSearchRequest,
+ reqOptions: AxiosRequestConfig,
+ objectType: string,
+ identifierType: string,
+ destination: HubSpotDestination,
+ metadata: Metadata,
+): Promise => {
+ let checkAfter: number | string = 1;
+ const searchResults: HubSpotContactRecord[] = [];
const requestData = reqdata;
const { Config } = destination;
@@ -560,7 +655,7 @@ const performHubSpotSearch = async (
* */
while (checkAfter) {
- const searchResponse = await httpPOST(url, requestData, requestOptions, {
+ const httpResponse = await httpPOST(url, requestData, requestOptions, {
destType: 'hs',
feature: 'transformation',
endpointPath,
@@ -569,9 +664,9 @@ const performHubSpotSearch = async (
metadata,
});
- const processedResponse = processAxiosResponse(searchResponse);
+ const processedResponse = processAxiosResponse(httpResponse);
- if (processedResponse.status !== 200) {
+ if (!isHttpStatusSuccess(processedResponse.status)) {
throw new NetworkError(
`rETL - Error during searching object record. ${JSON.stringify(
processedResponse.response?.message,
@@ -584,18 +679,21 @@ const performHubSpotSearch = async (
);
}
- const after = processedResponse.response?.paging?.next?.after || 0;
- requestData.after = after; // assigning to the new value of after
+ const rawResponse = processedResponse.response;
+ const searchApiResponse: HubSpotSearchResponse = isHubSpotSearchResponse(rawResponse)
+ ? rawResponse
+ : { results: [] };
+ const after = searchApiResponse?.paging?.next?.after || 0;
+ requestData.after = Number(after); // assigning to the new value of after
checkAfter = after; // assigning to the new value if no after we assign it to 0 and no more calls will take place
-
- const results = processedResponse.response?.results;
+ const results = searchApiResponse?.results;
const extraProp = primaryToSecondaryFields[identifierType];
if (results) {
searchResults.push(
- ...results.map((result) => {
- const contact = {
+ ...results.map((result: HubSpotSearchResult) => {
+ const contact: HubSpotContactRecord = {
id: result.id,
- property: result.properties[identifierType],
+ property: String(result.properties[identifierType] || ''),
};
// Following maps the extra property to the contact object which
// help us to know if the contact was found using secondary property
@@ -622,8 +720,8 @@ const performHubSpotSearch = async (
* @param {*} chunkValue
* @returns
*/
-const getRequestData = (identifierType, chunkValue) => {
- const requestData = {
+const getRequestData = (identifierType: string, chunkValue: string[]): HubSpotSearchRequest => {
+ const requestData: HubSpotSearchRequest = {
filterGroups: [
{
filters: [
@@ -656,7 +754,7 @@ const getRequestData = (identifierType, chunkValue) => {
},
],
});
- requestData.properties.push(secondaryProp);
+ requestData.properties?.push(secondaryProp);
}
return requestData;
};
@@ -666,9 +764,13 @@ const getRequestData = (identifierType, chunkValue) => {
* @param {*} inputs
* @param {*} destination
*/
-const getExistingContactsData = async (inputs, destination, metadata) => {
+const getExistingContactsData = async (
+ inputs: { message: HubspotRudderMessage }[],
+ destination: HubSpotDestination,
+ metadata: Metadata,
+): Promise => {
const { Config } = destination;
- const hsIdsToBeUpdated = [];
+ const hsIdsToBeUpdated: HubSpotContactRecord[] = [];
const firstMessage = inputs[0].message;
if (!firstMessage) {
@@ -708,15 +810,19 @@ const getExistingContactsData = async (inputs, destination, metadata) => {
* @param {*} useSecondaryProp -> Lets us know if that id was found using the secondary property and not the primary
* @returns
*/
-const setHsSearchId = (input, id, useSecondaryProp = false) => {
+const setHsSearchId = (
+ input: { message: HubspotRudderMessage },
+ id: string,
+ useSecondaryProp = false,
+): HubSpotExternalIdObject[] => {
const { message } = input;
- const resultExternalId = [];
+ const resultExternalId: HubSpotExternalIdObject[] = [];
const externalIdArray = message.context?.externalId;
if (externalIdArray) {
externalIdArray.forEach((extIdObj) => {
const { type } = extIdObj;
const extIdObjParam = extIdObj;
- if (type.includes(DESTINATION)) {
+ if (type && type.includes(DESTINATION)) {
extIdObjParam.hsSearchId = id;
}
if (useSecondaryProp) {
@@ -739,30 +845,39 @@ const setHsSearchId = (input, id, useSecondaryProp = false) => {
* For email as primary key we use `hs_additional_emails` as well property to search existing contacts
* */
-const splitEventsForCreateUpdate = async (inputs, destination, metadata) => {
+const splitEventsForCreateUpdate = async (
+ inputs: HubspotRouterRequest[],
+ destination: HubSpotDestination,
+ metadata: Metadata,
+): Promise => {
// get all the id and properties of already existing objects needed for update.
const hsIdsToBeUpdated = await getExistingContactsData(inputs, destination, metadata);
const resultInput = inputs.map((input) => {
const { message } = input;
const inputParam = input;
- const { destinationExternalId, identifierType } = getDestinationExternalIDInfoForRetl(
- message,
- DESTINATION,
- );
+ const rawInfo = getDestinationExternalIDInfoForRetl(message, DESTINATION);
+ const externalIdInfo = isHubSpotExternalIdInfo(rawInfo) ? rawInfo : null;
+ const destinationExternalId = externalIdInfo?.destinationExternalId;
+ const identifierType = externalIdInfo?.identifierType;
const filteredInfo = hsIdsToBeUpdated.filter(
(update) =>
- update.property.toString().toLowerCase() === destinationExternalId.toString().toLowerCase(), // second condition is for secondary property for identifier type
+ destinationExternalId &&
+ update.property.toString().toLowerCase() === String(destinationExternalId).toLowerCase(), // second condition is for secondary property for identifier type
);
+ const { context } = message;
if (filteredInfo.length > 0) {
- inputParam.message.context.externalId = setHsSearchId(input, filteredInfo[0].id);
- inputParam.message.context.hubspotOperation = 'updateObject';
+ inputParam.message.context = {
+ ...context,
+ externalId: setHsSearchId(input, filteredInfo[0].id),
+ hubspotOperation: 'updateObject',
+ };
return inputParam;
}
- const secondaryProp = primaryToSecondaryFields[identifierType];
- if (secondaryProp) {
+ const secondaryProp = identifierType ? primaryToSecondaryFields[identifierType] : undefined;
+ if (secondaryProp && destinationExternalId) {
/* second condition is for secondary property for identifier type
For example:
update[secondaryProp] = "abc@e.com;cd@e.com;k@w.com"
@@ -775,35 +890,38 @@ const splitEventsForCreateUpdate = async (inputs, destination, metadata) => {
?.toString()
.toLowerCase()
.split(';')
- .includes(destinationExternalId.toString().toLowerCase()),
+ .includes(String(destinationExternalId).toLowerCase()),
);
if (filteredInfoForSecondaryProp.length > 0) {
- inputParam.message.context.externalId = setHsSearchId(
- input,
- filteredInfoForSecondaryProp[0].id,
- true,
- );
- inputParam.message.context.hubspotOperation = 'updateObject';
+ inputParam.message.context = {
+ ...context,
+ externalId: setHsSearchId(input, filteredInfoForSecondaryProp[0].id, true),
+ hubspotOperation: 'updateObject',
+ };
return inputParam;
}
}
// if not found in the existing contacts, then it's a new contact
- inputParam.message.context.hubspotOperation = 'createObject';
+ inputParam.message.context = {
+ ...context,
+ hubspotOperation: 'createObject',
+ };
return inputParam;
});
return resultInput;
};
-const getHsSearchId = (message) => {
- const externalIdArray = message.context?.externalId;
- let hsSearchId = null;
+const getHsSearchId = (message: HubspotRudderMessage): { hsSearchId: string | null } => {
+ const { context } = message;
+ const externalIdArray = context?.externalId;
+ let hsSearchId: string | null = null;
if (externalIdArray) {
externalIdArray.forEach((extIdObj) => {
const { type } = extIdObj;
- if (type.includes(DESTINATION)) {
- hsSearchId = extIdObj.hsSearchId;
+ if (typeof type === 'string' && type.includes(DESTINATION)) {
+ hsSearchId = extIdObj.hsSearchId || null;
}
});
}
@@ -816,7 +934,12 @@ const getHsSearchId = (message) => {
* @param {*} traits
* @param {*} destination
*/
-const populateTraits = async (propertyMap, traits, destination, metadata) => {
+const populateTraits = async (
+ propertyMap: HubSpotPropertyMap | undefined,
+ traits: Record,
+ destination: HubSpotDestination,
+ metadata: Metadata,
+): Promise> => {
const populatedTraits = traits;
let propertyToTypeMap = propertyMap;
if (!propertyToTypeMap) {
@@ -827,7 +950,7 @@ const populateTraits = async (propertyMap, traits, destination, metadata) => {
const keys = Object.keys(populatedTraits);
keys.forEach((key) => {
const value = populatedTraits[key];
- if (propertyToTypeMap[key] === 'date') {
+ if (propertyToTypeMap && propertyToTypeMap[key] === 'date' && isDateLike(value)) {
populatedTraits[key] = getUTCMidnightTimeStampValue(value);
}
});
@@ -835,9 +958,11 @@ const populateTraits = async (propertyMap, traits, destination, metadata) => {
return populatedTraits;
};
-const addExternalIdToHSTraits = (message) => {
- const externalIdObj = message.context?.externalId?.[0];
- if (externalIdObj.useSecondaryObject) {
+const addExternalIdToHSTraits = (message: HubspotRudderMessage): void => {
+ const { context } = message;
+ const externalIdArray = context?.externalId;
+ const externalIdObj = externalIdArray?.[0];
+ if (externalIdObj?.useSecondaryObject) {
    /* this condition helps us to NOT override the primary key value with the secondary key value
example:
      for `email` as primary key and `hs_additional_emails` as secondary key we don't want to override `email` with `hs_additional_emails`.
@@ -848,19 +973,31 @@ const addExternalIdToHSTraits = (message) => {
set(getFieldValueFromMessage(message, 'traits'), externalIdObj.identifierType, externalIdObj.id);
};
-const convertToResponseFormat = (successRespListWithDontBatchTrue) => {
- const response = [];
+const convertToResponseFormat = (
+ successRespListWithDontBatchTrue: {
+ message: HubspotProcessorTransformationOutput;
+ metadata: Partial;
+ destination: HubSpotDestination;
+ }[],
+): HubSpotRouterTransformationOutput[] => {
+ const response: HubSpotRouterTransformationOutput[] = [];
if (Array.isArray(successRespListWithDontBatchTrue)) {
successRespListWithDontBatchTrue.forEach((event) => {
const { message, metadata, destination } = event;
- const endpoint = get(message, 'endpoint');
+ const endpoint =
+ typeof message.endpoint === 'string'
+ ? message.endpoint
+ : String(get(message, 'endpoint') ?? '');
const batchedResponse = defaultBatchRequestConfig();
- batchedResponse.batchedRequest.headers = message.headers;
+ batchedResponse.batchedRequest.headers = message.headers!;
batchedResponse.batchedRequest.endpoint = endpoint;
- batchedResponse.batchedRequest.body = message.body;
- batchedResponse.batchedRequest.params = message.params;
- batchedResponse.batchedRequest.method = message.method;
+ batchedResponse.batchedRequest.body = {
+ ...batchedResponse.batchedRequest.body,
+ ...message.body,
+ };
+ batchedResponse.batchedRequest.params = message.params!;
+ batchedResponse.batchedRequest.method = message.method!;
batchedResponse.metadata = [metadata];
batchedResponse.destination = destination;
@@ -876,19 +1013,133 @@ const convertToResponseFormat = (successRespListWithDontBatchTrue) => {
return response;
};
-const isIterable = (obj) => {
- // checks for null and undefined
- if (obj == null) {
- return false;
+// remove system fields from the properties because they are not allowed to be updated
+const removeHubSpotSystemField = (properties: Record): Record =>
+ omit(properties, HUBSPOT_SYSTEM_FIELDS);
+
+// Cache for HubSpot contact properties (V3 API) - stores hasUniqueValue per property
+// TTL: 1 hour - property definitions rarely change
+const uniqueContactPropertiesCache = new Cache(
+ 'HS_CONTACT_PROPERTIES_V3',
+ CONTACT_PROPERTIES_CACHE_TTL,
+ {
+ destType: DESTINATION,
+ },
+);
+
+/**
+ * Fetches contact properties from HubSpot CRM V3 API.
+ * Ref - https://developers.hubspot.com/docs/api-reference/crm-properties-v3/core/get-crm-v3-properties-objectType
+ *
+ * @param destination - HubSpot destination config
+ * @param metadata - Request metadata
+ * @returns Map of property name -> hasUniqueValue
+ */
+const fetchContactPropertiesV3 = async (
+ destination: HubSpotDestination,
+ metadata: Metadata,
+): Promise> => {
+ const { Config } = destination;
+ const statTags = {
+ destType: DESTINATION,
+ feature: 'transformation',
+ endpointPath: '/crm/v3/properties/contacts',
+ requestMethod: 'GET',
+ module: 'router',
+ metadata,
+ };
+ const authenticationInfo = addHsAuthentication({}, Config);
+ const response = await httpGET(CRM_V3_CONTACT_PROPERTIES_ENDPOINT, authenticationInfo, statTags);
+
+ const processedResponse = processAxiosResponse(response);
+ if (processedResponse.status !== 200) {
+ throw new NetworkError(
+ `Failed to fetch HubSpot contact properties: ${JSON.stringify(processedResponse.response)}`,
+ processedResponse.status,
+ {
+ [tags.TAG_NAMES.ERROR_TYPE]: getDynamicErrorType(processedResponse.status),
+ },
+ processedResponse,
+ );
}
- return typeof obj[Symbol.iterator] === 'function';
+
+ const body = processedResponse.response as HubSpotPropertiesV3Response;
+ const results = body?.results ?? [];
+ const map: Record = {};
+ results.forEach((prop: HubSpotPropertyV3) => {
+ map[prop.name] = Boolean(prop.hasUniqueValue);
+ });
+ return map;
};
-// remove system fields from the properties because they are not allowed to be updated
-const removeHubSpotSystemField = (properties) => omit(properties, HUBSPOT_SYSTEM_FIELDS);
+/**
+ * Checks if the lookup field has unique value constraint in HubSpot.
+ * Uses in-memory cache to avoid repeated API calls.
+ * Refetches when lookup field is not in cache (handles new custom fields added after cache).
+ * Upsert endpoint requires hasUniqueValue=true for the lookup field.
+ *
+ * @param destination - HubSpot destination config
+ * @param lookupField - The configured lookup field (e.g. email, hs_object_id)
+ * @param metadata - Request metadata
+ * @returns true if lookupField has hasUniqueValue=true, false otherwise
+ */
+const isLookupFieldUnique = async (
+ destination: HubSpotDestination,
+ lookupField: string,
+ metadata: Metadata,
+): Promise => {
+ const cacheKey = destination.ID;
+
+ const isFieldInMap = (map: Record) => lookupField in map;
+
+ let propertiesMap = (await uniqueContactPropertiesCache.get(cacheKey)) as
+ | Record
+ | undefined;
+
+ // Refetch if cache miss OR lookup field not in cached data (e.g. new custom field added)
+ if (!propertiesMap || !isFieldInMap(propertiesMap)) {
+ propertiesMap = await fetchContactPropertiesV3(destination, metadata);
+ if (propertiesMap) {
+ uniqueContactPropertiesCache.set(cacheKey, propertiesMap);
+ }
+ }
+
+ if (!propertiesMap) return false;
+ return propertiesMap[lookupField] ?? false;
+};
+
+/**
+ * Determines if the upsert feature is enabled for a given workspace.
+ *
+ * Logic:
+ * 1. If ENABLED = "ALL" -> return true
+ * 2. If workspaceId in ENABLED list -> return true
+ * 3. Default -> return false
+ *
+ * @param workspaceId - The workspace ID to check
+ * @returns Whether upsert is enabled for this workspace
+ */
+const isUpsertEnabled = (workspaceId: string): boolean => {
+ const enabledWorkspaces = process.env.HUBSPOT_UPSERT_ENABLED_WORKSPACES || '';
+
+ // Check if enabled for all workspaces
+ if (enabledWorkspaces.trim().toUpperCase() === 'ALL') {
+ return true;
+ }
+
+ // Check if workspace is in the enabled list
+ if (enabledWorkspaces && workspaceId) {
+ const enabledList = enabledWorkspaces.split(',').map((ws) => ws.trim());
+ return enabledList.includes(workspaceId);
+ }
+
+ // Default: upsert not enabled
+ return false;
+};
-module.exports = {
+export {
validateDestinationConfig,
+ addHsAuthentication,
addExternalIdToHSTraits,
formatKey,
fetchFinalSetOfTraits,
@@ -907,6 +1158,8 @@ module.exports = {
extractIDsForSearchAPI,
getRequestData,
convertToResponseFormat,
- isIterable,
removeHubSpotSystemField,
+ isUpsertEnabled,
+ getLookupFieldValue,
+ isLookupFieldUnique,
};
diff --git a/src/v0/destinations/impact/transform.js b/src/v0/destinations/impact/transform.js
index 4d266e9c4b4..3b7544eaa87 100644
--- a/src/v0/destinations/impact/transform.js
+++ b/src/v0/destinations/impact/transform.js
@@ -172,7 +172,9 @@ const processTrackEvent = (message, Config) => {
respArray.push(responseBuilder(payload, endpoint, Config));
}
if (eventType.includes('action')) {
- payload.ClickId = message.context?.referrer?.id || message.properties?.clickId;
+ if (!isDefinedAndNotNull(payload.ClickId)) {
+ payload.ClickId = message.context?.referrer?.id || message.properties?.clickId;
+ }
respArray.push(responseBuilder(payload, endpoint, Config));
}
return respArray;
diff --git a/src/v0/destinations/iterable/deleteUsers.js b/src/v0/destinations/iterable/deleteUsers.js
index 9c38f5f692f..f97e00fccae 100644
--- a/src/v0/destinations/iterable/deleteUsers.js
+++ b/src/v0/destinations/iterable/deleteUsers.js
@@ -1,8 +1,9 @@
const {
- NetworkError,
ConfigurationError,
+ NetworkError,
forEachInBatches,
} = require('@rudderstack/integrations-lib');
+const { DeleteUsersError } = require('../../util/errorTypes');
const { httpDELETE } = require('../../../adapters/network');
const { processAxiosResponse } = require('../../../adapters/utils/networkUtils');
const { isHttpStatusSuccess } = require('../../util');
@@ -73,7 +74,7 @@ const userDeletionHandler = async (userAttributes, config) => {
);
if (failedUserDeletions.length > 0) {
- throw new NetworkError(
+ const networkError = new NetworkError(
`User deletion request failed for userIds : ${JSON.stringify(failedUserDeletions)}`,
400,
{
@@ -81,6 +82,10 @@ const userDeletionHandler = async (userAttributes, config) => {
},
failedUserDeletions,
);
+ throw new DeleteUsersError(
+ networkError,
+ `User deletion request failed. Reasons: ${failedUserDeletions.map((item) => item.Reason).join(', ')}`,
+ );
}
return { statusCode: 200, status: 'successful' };
diff --git a/src/v0/destinations/klaviyo/README.md b/src/v0/destinations/klaviyo/README.md
new file mode 100644
index 00000000000..82c7dc513fd
--- /dev/null
+++ b/src/v0/destinations/klaviyo/README.md
@@ -0,0 +1,369 @@
+# Klaviyo Destination
+
+Implementation in **JavaScript**
+
+## Configuration
+
+### Required Settings
+
+- **Private API Key**: Required for authentication with Klaviyo REST API
+
+ - Passed via `Authorization: Klaviyo-API-Key {privateApiKey}` header
+ - Must have appropriate permissions for profiles, events, and subscriptions
+
+- **Public API Key**: Required for client-side SDK implementations (device mode)
+
+- **API Version**: Specifies the Klaviyo API revision to use
+ - `v1`: Uses revision `2023-02-22` (deprecated, scheduled for removal)
+ - `v2`: Uses revision `2024-10-15` (recommended, default)
+
+### Optional Settings
+
+- **List ID**: Default list for subscribing users during identify calls
+
+ - Used when `subscribe` trait is set to `true` in the event
+
+- **Flatten Properties**: Enable to flatten nested user/event properties (default: `false`)
+
+ - Transforms nested objects into dot-notation keys
+
+- **Enforce Email As Primary**: When enabled, uses email or phone as primary identifier instead of external_id (default: `false`)
+
+- **Consent**: Array of consent channels to apply (default: `["email"]`)
+
+ - Options: `email`, `sms`
+ - Controls which marketing channels users are subscribed to
+
+- **Event Filtering**: Control which events are sent to Klaviyo
+ - `eventFilteringOption`: `disable`, `whitelistedEvents`, or `blacklistedEvents`
+ - `whitelistedEvents`: Array of allowed event names
+ - `blacklistedEvents`: Array of blocked event names
+
+## Integration Functionalities
+
+> Klaviyo supports **Cloud mode** and **Device mode**
+
+### Supported Message Types
+
+| Connection Mode | Message Types |
+| --------------- | ------------------------------ |
+| Cloud | identify, track, screen, group |
+| Device (web) | identify, track, page |
+
+### Batching Support
+
+- **Supported**: Yes (both V1 and V2)
+- **Message Types**: Subscription events (subscribe/unsubscribe)
+- **Batch Limits**:
+ - Subscription events: 100 profiles per batch
+ - Profile and Track events: Not batched together with subscriptions
+
+Both API versions use `MAX_BATCH_SIZE` (100) when chunking subscription requests. V1 batches subscribe requests to `POST /api/profile-subscription-bulk-create-jobs`. V2 batches both subscribe requests to `POST /api/profile-subscription-bulk-create-jobs` and unsubscribe requests to `POST /api/profile-subscription-bulk-delete-jobs`. Profile and track events are sent individually.
+
+### Rate Limits
+
+Klaviyo uses a fixed-window rate limiting algorithm with burst (1-second) and steady (1-minute) windows. Rate limits are per-account.
+
+#### Rate Limit Tiers
+
+| Tier | Burst (per second) | Steady (per minute) |
+| ---- | ------------------ | ------------------- |
+| XS | 1 | 15 |
+| S | 3 | 60 |
+| M | 10 | 150 |
+| L | 75 | 700 |
+| XL | 350 | 3500 |
+
+#### Endpoint-Specific Rate Limits
+
+The following endpoints are used by the Klaviyo transformer:
+
+| Endpoint | Purpose | Burst | Steady | Docs Reference |
+| ------------------------------------------------- | ------------------------------ | ----- | ------ | ------------------------------------------------------------------------------------------------ |
+| `POST /api/profiles` | Create profile (V1) | 75/s | 700/m | [Create Profile](https://developers.klaviyo.com/en/reference/create_profile) |
+| `PATCH /api/profiles/{id}` | Update profile (V1) | 75/s | 700/m | [Update Profile](https://developers.klaviyo.com/en/reference/update_profile) |
+| `POST /api/profile-import` | Create/Update profile (V2) | 75/s | 700/m | [Create or Update Profile](https://developers.klaviyo.com/en/reference/create_or_update_profile) |
+| `POST /api/events` | Create event | 350/s | 3500/m | [Create Event](https://developers.klaviyo.com/en/reference/create_event) |
+| `POST /api/profile-subscription-bulk-create-jobs` | Subscribe profiles to list | 75/s | 700/m | [Bulk Subscribe](https://developers.klaviyo.com/en/reference/bulk_subscribe_profiles) |
+| `POST /api/profile-subscription-bulk-delete-jobs` | Unsubscribe profiles from list | 75/s | 700/m | [Bulk Unsubscribe](https://developers.klaviyo.com/en/reference/bulk_unsubscribe_profiles) |
+
+#### Rate Limit Headers
+
+Non-429 responses include these headers:
+
+- `RateLimit-Limit`: Maximum requests per time period
+- `RateLimit-Remaining`: Approximate requests left in current window
+- `RateLimit-Reset`: Seconds until window resets
+
+#### Handling Rate Limit Errors
+
+When rate limits are exceeded, Klaviyo returns `HTTP 429` with a `Retry-After` header. Implement exponential backoff with randomization to avoid thundering herd effects.
+
+[Docs Reference](https://developers.klaviyo.com/en/docs/rate_limits_and_error_handling)
+
+### Payload Limits
+
+| Constraint | Value |
+| ------------------------ | ------------------- |
+| Max payload size | 5 MB (decompressed) |
+| Max properties per event | 400 |
+| Max string field size | 100 KB |
+| Max array items | 4,000 |
+| Max nested object levels | 10 |
+| Max profile payload | 100 KB |
+
+### Intermediate Calls
+
+#### Identify Flow (V1 API - Two-Step Profile Creation)
+
+- **Supported**: Yes
+- **Use Case**: Create or update profile, then optionally subscribe to list
+- **First Call**: `POST /api/profiles` - Create profile (returns 201 or 409 for duplicate)
+- **Second Call**: `PATCH /api/profiles/{profileId}` - Update profile with additional properties (if profile already exists)
+- **Optional Third Call**: `POST /api/profile-subscription-bulk-create-jobs` - Subscribe to list (if `subscribe` trait is true and `listId` is configured)
+
+```javascript
+// The condition that triggers subscription:
+if (traitsInfo.subscribe && (message.context?.externalId || listId)) {
+ // Add subscription request
+}
+```
+
+#### Profile Deduplication Handling
+
+When creating a profile that already exists (409 Conflict response), the transformer:
+
+1. Extracts the duplicate profile ID from `errors[0].meta.duplicate_profile_id`
+2. Uses this ID for subsequent PATCH operations instead of failing
+
+### Profile Identification
+
+- **External ID**: Uses `userId` or `context.externalId` with type `klaviyo-profileId`
+- **Alias Support**: Creates profiles with email/phone as primary identifier when `enforceEmailAsPrimary` is enabled
+- **Phone Number Validation**: V2 API requires E.164 format (e.g., `+15551234567`)
+
+### Proxy Delivery
+
+- **Supported**: No explicit proxy handler found
+- The transformer implements careful event ordering for delivery consistency
+
+### User Deletion
+
+- **Full Profile Deletion**: Not supported by Klaviyo API
+- **List Unsubscription**: Supported via `POST /api/profile-subscription-bulk-delete-jobs`
+ - Used when GROUP event has `subscribe: false` trait
+
+### OAuth Support
+
+- **Supported**: No (uses API key authentication)
+
+### Additional Functionalities
+
+#### E-commerce Event Mapping (V1 API Only)
+
+Special handling for e-commerce events with automatic event name conversion is **supported only when using the V1 API** (`apiVersion: v1`). The V2 API passes event names through as-is without conversion.
+
+| RudderStack Event | Klaviyo Event (V1) |
+| ------------------ | ------------------ |
+| `product viewed` | `Viewed Product` |
+| `product clicked` | `Viewed Product` |
+| `product added` | `Added to Cart` |
+| `checkout started` | `Started Checkout` |
+
+When using the V2 API, send events with the desired Klaviyo metric names directly (e.g., `Viewed Product`, `Added to Cart`).
+
+#### Metadata Operations (V2 API)
+
+The V2 API supports advanced profile operations via integrations object:
+
+```javascript
+// Via integrations.Klaviyo in the event
+{
+ "fieldsToUnset": ["old_property"], // Remove properties from profile
+ "fieldsToAppend": ["list_property"], // Append to array properties
+ "fieldsToUnappend": ["list_property"] // Remove from array properties
+}
+```
+
+#### Custom Properties Extraction
+
+- Extracts custom properties from traits/properties beyond standard Klaviyo fields
+- Optionally flattens nested objects when `flattenProperties` is enabled
+- E-commerce events exclude specific keys from custom properties (product_id, sku, price, etc.)
+
+#### Suppress Events Feature
+
+- When a profile is newly created (201 response), the transformer can return a 299 status code
+- This signals to suppress duplicate processing for events that created the profile
+
+## General Queries
+
+### Event Ordering
+
+#### Identify, Group
+
+These event types modify user profiles and list subscriptions. Event ordering is important to avoid:
+
+- Stale profile data overwriting newer updates
+- Incorrect subscription states
+
+**Recommendation**: Maintain ordering for profile-modifying events.
+
+#### Track, Screen
+
+Track events include a `time` field populated from the event's timestamp. Klaviyo processes events based on this timestamp, reducing strict ordering requirements.
+
+However, if track events include profile attributes, those attributes should still be ordered.
+
+> For best results, maintain event ordering for all event types that modify profile data.
+
+### Data Replay Feasibility
+
+#### Missing Data Replay
+
+- **Identify Events**: Feasible with caution. Profile updates are idempotent, but ordering matters.
+- **Track Events**: Feasible. Each event has a unique_id (derived from messageId) to prevent duplicates.
+- **Group Events**: Feasible. Subscription state can be re-established.
+
+#### Already Delivered Data Replay
+
+- **Track Events**: Klaviyo supports `unique_id` field to identify duplicate events
+
+ - Events with the same `unique_id`, metric, and profile are deduplicated
+ - The transformer uses `messageId` as `unique_id`
+ - Replay is feasible if events have consistent messageIds
+
+- **Identify Events**: Profile updates are idempotent (same data produces same result)
+ - Replay is feasible but may trigger unnecessary API calls
+
+### Multiplexing
+
+- **Supported**: Yes
+- **Description**: The Klaviyo destination can generate multiple API calls from a single input event.
+
+#### Multiplexing Scenarios
+
+1. **Identify Events with Subscription**:
+
+ - **Multiplexing**: YES (both V1 and V2)
+ - **Condition**: `subscribe` trait is defined and `listId` is configured
+
+ | API | First API Call | Second API Call |
+ | --- | -------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------- |
+ | V1 | `POST /api/profiles` or `PATCH /api/profiles/{id}` - Create/update profile | `POST /api/profile-subscription-bulk-create-jobs` - Subscribe to list |
+ | V2 | `POST /api/profile-import` - Create/update profile | `POST /api/profile-subscription-bulk-create-jobs` (subscribe) or `POST /api/profile-subscription-bulk-delete-jobs` (unsubscribe) |
+
+ - **Note**: V1 only supports subscription (`subscribe: true`). V2 supports both subscribe and unsubscribe based on the `subscribe` trait value.
+
+2. **Group Events**:
+
+ - **Multiplexing**: NO (both V1 and V2)
+ - Single call to subscription endpoint based on `subscribe` trait value
+ - **V1**: Subscribe only (`subscribe: true` required) → `POST /api/profile-subscription-bulk-create-jobs`
+ - **V2**: Subscribe or unsubscribe → `POST /api/profile-subscription-bulk-create-jobs` or `POST /api/profile-subscription-bulk-delete-jobs`
+
+3. **Track/Screen Events**:
+
+ - **Multiplexing**: NO (both V1 and V2)
+ - Single call to `POST /api/events`
+
+#### Event Type Grouping
+
+The transformer groups consecutive events of the same type to maintain ordering while optimizing batching:
+
+```javascript
+// Input: ['user1 track1', 'user1 identify 1', 'user1 track 2', 'user2 identify 1']
+// Grouped: [['user1 track1'], ['user1 identify 1', 'user2 identify 1'], ['user1 track 2']]
+```
+
+## Version Information
+
+### Current Version
+
+- **V1 API**: Revision `2023-02-22` (deprecated)
+- **V2 API**: Revision `2024-10-15` (current default)
+
+### API Version Lifecycle
+
+Klaviyo provides **2 years** of support for each API revision:
+
+1. **Stable** (Year 1): Active support, non-breaking changes only
+2. **Deprecated** (Year 2): Use discouraged, migration recommended
+3. **Retired** (Year 3+): No longer supported, breakages likely
+
+### Version Deprecation Schedule
+
+| Revision | Status | Planned Retirement |
+| ---------- | ---------- | ------------------ |
+| 2023-02-22 | Deprecated | ~February 2025 |
+| 2024-10-15 | Stable | ~October 2026 |
+
+**Recommendation**: Use `apiVersion: v2` for new integrations and upgrade existing integrations to avoid deprecation.
+
+### Breaking Changes Between Versions
+
+- V2 requires E.164 phone number format
+- V2 uses `/api/profile-import` instead of `/api/profiles` for profile creation
+- V2 supports additional metadata operations (fieldsToUnset, fieldsToAppend, fieldsToUnappend)
+
+[Version Policy Reference](https://developers.klaviyo.com/en/docs/api_versioning_and_deprecation_policy)
+
+## Processor vs Router Destination
+
+- **Type**: Router destination (`transformAtV1: "router"`)
+- This enables batching of subscription events and event ordering optimization
+
+## Partial Batching Response Handling
+
+- No explicit `networkHandler.js` found in the destination directory, and no transformer proxy handler is implemented
+- Error handling for partial batch failures is not implemented
+
+## Validations
+
+### Required Fields
+
+| Event Type | Required Fields |
+| ---------- | ----------------------------------------------------------------- |
+| Identify | At least one of: `email`, `phone_number`, `external_id`, `userId` |
+| Track | `event` name, profile identifier |
+| Group | `groupId` (used as list ID), `subscribe` trait |
+| Screen | `name`, profile identifier |
+
+### Phone Number Format
+
+- V2 API: Must be E.164 format (e.g., `+15551234567`)
+- Throws `InstrumentationError` if invalid format
+
+### Profile Identifier Priority
+
+Identifier priority depends on configuration and API version.
+
+**V1 API**:
+
+- **If `enforceEmailAsPrimary` is enabled**: Use `email` and `phone_number` as identifiers. `external_id` is omitted.
+- **Otherwise**: Use `external_id` (mapped from `userId`).
+
+**V2 API**:
+
+1. **If `context.externalId` with type `klaviyo-profileId` is present**: Mapped to `data.id` for profile update operations (e.g., when profile already exists in Klaviyo).
+2. **If `enforceEmailAsPrimary` is enabled**: Use `email` and `phone_number` as identifiers. `external_id` is omitted.
+3. **Otherwise**: Use `external_id` (mapped from `userId`).
+
+## Documentation Links
+
+### REST API Documentation
+
+- [Klaviyo API Overview](https://developers.klaviyo.com/en/reference/api_overview)
+- [Profiles API](https://developers.klaviyo.com/en/reference/profiles_api_overview)
+- [Events API](https://developers.klaviyo.com/en/reference/events_api_overview)
+- [Subscriptions API](https://developers.klaviyo.com/en/reference/bulk_subscribe_profiles)
+- [Rate Limits](https://developers.klaviyo.com/en/docs/rate_limits_and_error_handling)
+- [API Versioning Policy](https://developers.klaviyo.com/en/docs/api_versioning_and_deprecation_policy)
+
+### RETL Functionality
+
+For RETL (Reverse ETL) functionality, please refer to [docs/retl.md](docs/retl.md)
+
+### Business Logic and Mappings
+
+For business logic and mappings information, please refer to [docs/businesslogic.md](docs/businesslogic.md)
diff --git a/src/v0/destinations/klaviyo/batchUtil.js b/src/v0/destinations/klaviyo/batchUtil.js
index f0489e8656c..81d70892232 100644
--- a/src/v0/destinations/klaviyo/batchUtil.js
+++ b/src/v0/destinations/klaviyo/batchUtil.js
@@ -36,7 +36,9 @@ const generateBatchedSubscriptionRequest = (subscription, destination) => {
const { listId, subscriptionProfileList, operation } = subscription;
subscriptionProfileList.forEach((profileList) => profiles.push(...profileList));
subscriptionPayloadResponse.body.JSON = getSubscriptionPayload(listId, profiles, operation);
- subscriptionPayloadResponse.endpoint = `${BASE_ENDPOINT}/api/${operation === 'subscribe' ? 'profile-subscription-bulk-create-jobs' : 'profile-subscription-bulk-delete-jobs'}`;
+ const endpointPath = `/api/${operation === 'subscribe' ? 'profile-subscription-bulk-create-jobs' : 'profile-subscription-bulk-delete-jobs'}`;
+ subscriptionPayloadResponse.endpoint = `${BASE_ENDPOINT}${endpointPath}`;
+ subscriptionPayloadResponse.endpointPath = endpointPath;
subscriptionPayloadResponse.headers = {
Authorization: `Klaviyo-API-Key ${privateApiKey}`,
'Content-Type': JSON_MIME_TYPE,
diff --git a/src/v0/destinations/klaviyo/batchUtil.test.js b/src/v0/destinations/klaviyo/batchUtil.test.js
index 9c04a402ca4..c11855e734d 100644
--- a/src/v0/destinations/klaviyo/batchUtil.test.js
+++ b/src/v0/destinations/klaviyo/batchUtil.test.js
@@ -107,6 +107,7 @@ describe('generateBatchedSubscriptionRequest', () => {
type: 'REST',
method: 'POST',
endpoint: 'https://a.klaviyo.com/api/profile-subscription-bulk-create-jobs',
+ endpointPath: '/api/profile-subscription-bulk-create-jobs',
headers: {
Authorization: 'Klaviyo-API-Key test-api-key',
'Content-Type': 'application/json',
@@ -158,6 +159,7 @@ describe('generateBatchedSubscriptionRequest', () => {
type: 'REST',
method: 'POST',
endpoint: 'https://a.klaviyo.com/api/profile-subscription-bulk-create-jobs',
+ endpointPath: '/api/profile-subscription-bulk-create-jobs',
headers: {
Authorization: 'Klaviyo-API-Key test-api-key',
'Content-Type': 'application/json',
diff --git a/src/v0/destinations/klaviyo/docs/businesslogic.md b/src/v0/destinations/klaviyo/docs/businesslogic.md
new file mode 100644
index 00000000000..088dd909edd
--- /dev/null
+++ b/src/v0/destinations/klaviyo/docs/businesslogic.md
@@ -0,0 +1,706 @@
+# Klaviyo Business Logic and Mappings
+
+## Overview
+
+This document outlines the business logic and mappings used in the Klaviyo destination integration. It covers how RudderStack events are mapped to Klaviyo's API format, the specific API endpoints used for each event type, and the special handling for various event types.
+
+## API Versions
+
+The Klaviyo destination supports two API versions:
+
+| Config Value | API Revision | Status |
+| ------------ | ------------ | ----------------- |
+| `v1` | `2023-02-22` | Deprecated |
+| `v2` | `2024-10-15` | Current (Default) |
+
+The API version is selected via `destination.Config.apiVersion` and affects endpoints, request formats, and available features.
+
+## API Endpoints and Request Flow
+
+### Identify Events
+
+#### V1 API Flow
+
+**Primary Endpoint**: `POST /api/profiles`
+**Update Endpoint**: `PATCH /api/profiles/{profileId}`
+**Subscription Endpoint**: `POST /api/profile-subscription-bulk-create-jobs`
+
+**Documentation**: [Klaviyo Profiles API](https://developers.klaviyo.com/en/reference/profiles_api_overview)
+
+**Request Flow**:
+
+1. **Profile Creation Attempt**:
+
+ ```
+ POST https://a.klaviyo.com/api/profiles
+ Headers:
+ Authorization: Klaviyo-API-Key {privateApiKey}
+ Content-Type: application/json
+ Accept: application/json
+ revision: 2023-02-22
+ ```
+
+2. **Handle Response**:
+
+ - HTTP 201 (Created): Profile created, extract `profileId` from response
+ - HTTP 409 (Conflict): Profile exists, extract `duplicate_profile_id` from `errors[0].meta`
+
+3. **Profile Update** (if 409 received):
+
+ ```
+ PATCH https://a.klaviyo.com/api/profiles/{profileId}
+ ```
+
+4. **Optional Subscription** (if `subscribe` trait is true and `listId` exists):
+ ```
+ POST https://a.klaviyo.com/api/profile-subscription-bulk-create-jobs
+ ```
+
+#### V2 API Flow
+
+**Primary Endpoint**: `POST /api/profile-import`
+**Subscribe Endpoint**: `POST /api/profile-subscription-bulk-create-jobs`
+**Unsubscribe Endpoint**: `POST /api/profile-subscription-bulk-delete-jobs`
+
+**Documentation**: [Create or Update Profile](https://developers.klaviyo.com/en/reference/create_or_update_profile), [Bulk Subscribe](https://developers.klaviyo.com/en/reference/bulk_subscribe_profiles), [Bulk Unsubscribe](https://developers.klaviyo.com/en/reference/bulk_unsubscribe_profiles)
+
+**Request Flow**:
+
+1. **Profile Import**:
+
+ ```
+ POST https://a.klaviyo.com/api/profile-import
+ Headers:
+ Authorization: Klaviyo-API-Key {privateApiKey}
+ Content-Type: application/json
+ Accept: application/json
+ revision: 2024-10-15
+ ```
+
+**RudderStack Input → V2 Profile Import** (Identify call):
+
+```json
+{
+ "userId": "user@1",
+ "anonymousId": "97c46c81-3140-456d-b2a9-690d70aaca35",
+ "traits": {
+ "email": "user@example.com",
+ "phone": "+15551234567",
+ "firstName": "John",
+ "lastName": "Doe",
+ "title": "Developer",
+ "organization": "Acme Inc",
+ "city": "Tokyo",
+ "region": "Kanto",
+ "country": "JP",
+ "zip": "100-0001",
+ "street": "63, Shibuya",
+ "custom_field": "value"
+ }
+}
+```
+
+**V2 Profile Import Payload** (sent to `POST /api/profile-import`):
+
+```json
+{
+ "data": {
+ "type": "profile",
+ "attributes": {
+ "external_id": "user@1",
+ "anonymous_id": "97c46c81-3140-456d-b2a9-690d70aaca35",
+ "email": "user@example.com",
+ "phone_number": "+15551234567",
+ "first_name": "John",
+ "last_name": "Doe",
+ "title": "Developer",
+ "organization": "Acme Inc",
+ "location": {
+ "city": "Tokyo",
+ "region": "Kanto",
+ "country": "JP",
+ "zip": "100-0001",
+ "address1": "63, Shibuya"
+ },
+ "properties": {
+ "custom_field": "value"
+ }
+ },
+ "meta": {
+ "patch_properties": {}
+ }
+ }
+}
+```
+
+2. **Optional Subscription** (if `traits.properties.subscribe` is `true` and `listId` exists)
+3. **Optional Unsubscription** (if `traits.properties.subscribe` is `false` and `listId` exists)
+
+**RudderStack Input → V2 Subscribe** (Identify with `subscribe: true`):
+
+```json
+{
+ "userId": "user@1",
+ "traits": {
+ "email": "user@example.com",
+ "phone": "+15551234567",
+ "firstName": "John",
+ "lastName": "Doe",
+ "properties": {
+ "listId": "XUepkK",
+ "subscribe": true,
+ "consent": ["email", "sms"]
+ }
+ }
+}
+```
+
+**V2 Subscribe Payload** (sent to `POST /api/profile-subscription-bulk-create-jobs`):
+
+```json
+{
+ "data": {
+ "type": "profile-subscription-bulk-create-job",
+ "attributes": {
+ "profiles": {
+ "data": [
+ {
+ "type": "profile",
+ "attributes": {
+ "email": "user@example.com",
+ "phone_number": "+15551234567",
+ "subscriptions": {
+ "email": { "marketing": { "consent": "SUBSCRIBED" } },
+ "sms": { "marketing": { "consent": "SUBSCRIBED" } }
+ }
+ }
+ }
+ ]
+ }
+ },
+ "relationships": {
+ "list": {
+ "data": {
+ "type": "list",
+ "id": "XUepkK"
+ }
+ }
+ }
+ }
+}
+```
+
+**RudderStack Input → V2 Unsubscribe** (Identify with `subscribe: false`):
+
+```json
+{
+ "userId": "user@1",
+ "traits": {
+ "email": "user@example.com",
+ "phone": "+15551234567",
+ "properties": {
+ "listId": "XUepkK",
+ "subscribe": false
+ }
+ }
+}
+```
+
+**V2 Unsubscribe Payload** (sent to `POST /api/profile-subscription-bulk-delete-jobs`):
+
+```json
+{
+ "data": {
+ "type": "profile-subscription-bulk-delete-job",
+ "attributes": {
+ "profiles": {
+ "data": [
+ {
+ "type": "profile",
+ "attributes": {
+ "email": "user@example.com",
+ "phone_number": "+15551234567"
+ }
+ }
+ ]
+ }
+ },
+ "relationships": {
+ "list": {
+ "data": {
+ "type": "list",
+ "id": "XUepkK"
+ }
+ }
+ }
+ }
+}
+```
+
+**V2 Subscription Notes**:
+
+- `traits.properties.listId` overrides destination `listId` config when present
+- `traits.properties.consent` controls channels: `["email"]`, `["sms"]`, or `["email", "sms"]`
+- At least one of `email` or `phone_number` is required for both subscribe and unsubscribe
+
+#### Identify Payload Structure
+
+**V1 Profile Payload**:
+
+```json
+{
+ "data": {
+ "type": "profile",
+ "attributes": {
+ "email": "user@example.com",
+ "phone_number": "+15551234567",
+ "external_id": "user123",
+ "first_name": "John",
+ "last_name": "Doe",
+ "organization": "Company",
+ "title": "Engineer",
+ "image": "https://example.com/avatar.jpg",
+ "location": {
+ "address1": "123 Main St",
+ "address2": "Apt 4",
+ "city": "San Francisco",
+ "region": "CA",
+ "zip": "94103",
+ "country": "US",
+ "latitude": 37.7749,
+ "longitude": -122.4194,
+ "timezone": "America/Los_Angeles"
+ },
+ "properties": {
+ "custom_field": "value"
+ }
+ }
+ }
+}
+```
+
+**V2 Profile Payload**:
+
+```json
+{
+ "data": {
+ "type": "profile",
+ "attributes": {
+ "email": "user@example.com",
+ "phone_number": "+15551234567",
+ "external_id": "user123",
+ "first_name": "John",
+ "last_name": "Doe",
+ "properties": {
+ "custom_field": "value"
+ },
+ "meta": {
+ "patch_properties": {
+ "unset": ["old_field"],
+ "append": { "list_field": "new_item" },
+ "unappend": { "list_field": "removed_item" }
+ }
+ }
+ }
+ }
+}
+```
+
+### Track Events
+
+**Endpoint**: `POST /api/events`
+**Documentation**: [Klaviyo Events API](https://developers.klaviyo.com/en/reference/events_api_overview)
+
+**Request Flow**:
+
+1. For all track events:
+ ```
+ POST https://a.klaviyo.com/api/events
+ Headers:
+ Authorization: Klaviyo-API-Key {privateApiKey}
+ Content-Type: application/json
+ Accept: application/json
+ revision: 2023-02-22 (v1) or 2024-10-15 (v2)
+ ```
+
+**Payload Structure**:
+
+```json
+{
+ "data": {
+ "type": "event",
+ "attributes": {
+ "metric": {
+ "data": {
+ "type": "metric",
+ "attributes": {
+ "name": "Event Name"
+ }
+ }
+ },
+ "profile": {
+ "data": {
+ "type": "profile",
+ "attributes": {
+ "email": "user@example.com",
+ "external_id": "user123"
+ }
+ }
+ },
+ "properties": {
+ "custom_property": "value"
+ },
+ "time": "2024-01-15T12:00:00Z",
+ "unique_id": "message-id-123"
+ }
+ }
+}
+```
+
+### Screen Events
+
+**Endpoint**: `POST /api/events`
+
+Screen events are converted to track events with the screen name as the event name.
+
+**Event Name Mapping**:
+
+- Screen event with name "Home" → Track event "Home"
+
+### Group Events
+
+**Subscribe Endpoint**: `POST /api/profile-subscription-bulk-create-jobs`
+**Unsubscribe Endpoint**: `POST /api/profile-subscription-bulk-delete-jobs`
+
+**Documentation**: [Klaviyo Subscriptions API](https://developers.klaviyo.com/en/reference/bulk_subscribe_profiles)
+
+**Request Flow**:
+
+1. Validate required fields:
+
+ - `groupId` is required (used as Klaviyo list ID)
+ - `subscribe` trait determines subscribe/unsubscribe operation
+
+2. Make subscription API call based on `subscribe` trait value
+
+**Subscribe Payload**:
+
+```json
+{
+ "data": {
+ "type": "profile-subscription-bulk-create-job",
+ "attributes": {
+ "profiles": {
+ "data": [
+ {
+ "type": "profile",
+ "attributes": {
+ "email": "user@example.com",
+ "phone_number": "+15551234567",
+ "subscriptions": {
+ "email": { "marketing": { "consent": "SUBSCRIBED" } },
+ "sms": { "marketing": { "consent": "SUBSCRIBED" } }
+ }
+ }
+ }
+ ]
+ }
+ },
+ "relationships": {
+ "list": {
+ "data": {
+ "type": "list",
+ "id": "LIST_ID"
+ }
+ }
+ }
+ }
+}
+```
+
+**Unsubscribe Payload**:
+
+```json
+{
+ "data": {
+ "type": "profile-subscription-bulk-delete-job",
+ "attributes": {
+ "profiles": {
+ "data": [
+ {
+ "type": "profile",
+ "attributes": {
+ "email": "user@example.com"
+ }
+ }
+ ]
+ }
+ },
+ "relationships": {
+ "list": {
+ "data": {
+ "type": "list",
+ "id": "LIST_ID"
+ }
+ }
+ }
+ }
+}
+```
+
+## Data Mapping
+
+### Standard Profile Fields
+
+| RudderStack Field | Klaviyo Field | Notes |
+| --------------------- | -------------- | ---------------------------- |
+| `userId` | `external_id` | Primary identifier |
+| `traits.email` | `email` | Required for subscriptions |
+| `traits.phone` | `phone_number` | E.164 format required for V2 |
+| `traits.firstName` | `first_name` | |
+| `traits.lastName` | `last_name` | |
+| `traits.title` | `title` | |
+| `traits.organization` | `organization` | |
+| `traits.avatar` | `image` | URL to profile image |
+
+### Location Mapping
+
+| RudderStack Field | Klaviyo Field |
+| ---------------------------- | -------------------- |
+| `traits.address.street` | `location.address1` |
+| `traits.address.city` | `location.city` |
+| `traits.address.state` | `location.region` |
+| `traits.address.postalCode` | `location.zip` |
+| `traits.address.country` | `location.country` |
+| `context.location.latitude` | `location.latitude` |
+| `context.location.longitude` | `location.longitude` |
+| `context.timezone` | `location.timezone` |
+
+### Track Event Mapping
+
+| RudderStack Field | Klaviyo Field | Notes |
+| ----------------- | ------------- | ----------------- |
+| `event` | `metric.name` | Event type/name |
+| `properties` | `properties` | Custom event data |
+| `timestamp` | `time` | ISO 8601 format |
+| `messageId` | `unique_id` | Deduplication key |
+
+### E-commerce Event Mapping
+
+| RudderStack Event | Klaviyo Event |
+| ------------------ | ------------------ |
+| `product viewed` | `Viewed Product` |
+| `product clicked` | `Viewed Product` |
+| `product added` | `Added to Cart` |
+| `checkout started` | `Started Checkout` |
+
+### E-commerce Product Properties
+
+| RudderStack Field | Klaviyo Field |
+| ----------------------- | ------------------- |
+| `properties.product_id` | `ProductID` |
+| `properties.sku` | `SKU` |
+| `properties.name` | `ProductName` |
+| `properties.quantity` | `Quantity` |
+| `properties.price` | `ItemPrice` |
+| `properties.url` | `ProductURL` |
+| `properties.image_url` | `ImageURL` |
+| `properties.categories` | `ProductCategories` |
+
+## Special Handling
+
+### Profile Identifier Priority
+
+Identifier priority depends on configuration and API version.
+
+**V1 API**:
+
+- **If `enforceEmailAsPrimary` is enabled**: Use `email` and `phone_number` as identifiers. `external_id` is omitted.
+- **Otherwise**: Use `external_id` (mapped from `userId`).
+
+**V2 API**:
+
+1. **If `context.externalId` with type `klaviyo-profileId` is present**: Mapped to `data.id` for profile update operations (e.g., when profile already exists in Klaviyo).
+2. **If `enforceEmailAsPrimary` is enabled**: Use `email` and `phone_number` as identifiers. `external_id` is omitted.
+3. **Otherwise**: Use `external_id` (mapped from `userId`).
+
+```javascript
+// Code reference for external ID extraction
+const profileId = getDestinationExternalIDInfoForRetl(message, 'klaviyo-profileId')?.objectId;
+```
+
+### Phone Number Validation (V2 API)
+
+V2 API requires E.164 format for phone numbers:
+
+```javascript
+// Valid: +15551234567
+// Invalid: (555) 123-4567, 555-123-4567
+```
+
+The transformer uses `libphonenumber-js` to validate phone numbers and throws `InstrumentationError` if invalid.
+
+### Custom Properties Extraction
+
+Custom properties are extracted from traits/properties excluding whitelisted Klaviyo fields:
+
+**Whitelisted Fields (excluded from custom properties)**:
+
+- `email`, `phone`, `firstName`, `lastName`, `title`, `organization`
+- `city`, `region`, `country`, `zip`, `address`, `timezone`
+- `latitude`, `longitude`, `location`, `image`
+
+**E-commerce Exclusion Keys** (for e-commerce events):
+
+- `name`, `product_id`, `sku`, `image_url`, `url`, `brand`, `price`
+- `compare_at_price`, `quantity`, `categories`, `products`, `product_names`
+- `order_id`, `value`, `checkout_url`, `item_names`, `items`
+
+### Property Flattening
+
+When `flattenProperties` is enabled:
+
+```javascript
+// Input
+{
+ "custom": {
+ "nested": {
+ "value": "test"
+ }
+ }
+}
+
+// Output (flattened)
+{
+ "custom.nested.value": "test"
+}
+```
+
+### Metadata Operations (V2 API)
+
+V2 API supports advanced property operations via integrations object:
+
+```javascript
+// Event integrations object
+{
+ "integrations": {
+ "Klaviyo": {
+ "fieldsToUnset": ["old_property"], // Remove properties
+ "fieldsToAppend": ["tags"], // Append to arrays
+ "fieldsToUnappend": ["tags"] // Remove from arrays
+ }
+ }
+}
+```
+
+These operations are included in the profile's `meta.patch_properties` object.
+
+## Batching Logic
+
+### Subscription Batching
+
+Subscription events are batched for efficiency:
+
+1. **Grouping**: Events grouped by list ID
+2. **Chunking**: Groups chunked into MAX_BATCH_SIZE (100) profiles
+3. **Merging**: Profiles in each chunk merged into single request
+
+```javascript
+// Batching flow
+const subscribeEventGroups = lodash.groupBy(
+ subscribeResponseList,
+ (event) => event.message.body.JSON.data.attributes.list_id,
+);
+
+// Chunk each group
+const chunks = lodash.chunk(profiles, MAX_BATCH_SIZE);
+```
+
+### Event Ordering
+
+The transformer maintains event ordering through type-based grouping:
+
+```javascript
+// Input sequence
+['track1', 'identify1', 'track2', 'identify2', 'track3'];
+
+// Grouped by type
+[['track1'], ['identify1', 'identify2'], ['track2', 'track3']];
+
+// Processed in order, maintaining relative sequence
+```
+
+## Error Handling
+
+### Profile Creation Errors
+
+| Status Code | Meaning | Action |
+| ----------- | ----------------- | -------------------------------------------- |
+| 201 | Profile created | Extract profileId, continue |
+| 409 | Duplicate profile | Extract duplicate_profile_id, use for update |
+| 400 | Bad request | Throw InstrumentationError |
+| 401 | Unauthorized | Throw ConfigurationError |
+| 429 | Rate limited | Retry with backoff |
+| 5xx | Server error | Retry |
+
+### Validation Errors
+
+| Error Type | Cause | Resolution |
+| ---------------------- | ------------------------- | ----------------------------------------- |
+| `InstrumentationError` | Missing required field | Ensure email/phone/userId is provided |
+| `InstrumentationError` | Invalid phone format (V2) | Use E.164 format |
+| `ConfigurationError` | Missing privateApiKey | Configure API key in destination settings |
+| `InstrumentationError` | Missing groupId | Provide groupId for group events |
+
+## Suppressed Events
+
+The transformer can suppress duplicate processing:
+
+- When profile is newly created (201 response) and `isNewStatusCodesAccepted()` returns true
+- Returns status code 299 to signal event suppression
+- Prevents duplicate profile creation from subsequent events
+
+## Consent Handling
+
+### Consent Channels
+
+Consent is managed via `destination.Config.consent` array:
+
+- `email`: Email marketing consent
+- `sms`: SMS marketing consent
+
+### Subscription Consent Structure
+
+```json
+{
+ "subscriptions": {
+ "email": {
+ "marketing": {
+ "consent": "SUBSCRIBED"
+ }
+ },
+ "sms": {
+ "marketing": {
+ "consent": "SUBSCRIBED"
+ }
+ }
+ }
+}
+```
+
+## Mapping Configuration Files
+
+The mapping configuration is defined in JSON files within the destination `data/` directory:
+
+| File | Purpose |
+| ----------------------- | ----------------------------------- |
+| `KlaviyoIdentify.json` | V1 identify event mapping |
+| `KlaviyoProfileV2.json` | V2 profile mapping |
+| `KlaviyoTrack.json` | V1 track event mapping |
+| `KlaviyoTrackV2.json` | V2 track event mapping |
+| `KlaviyoGroup.json` | Group event mapping |
+| `KlaviyoProfile.json` | Profile structure mapping |
+| `ViewedProduct.json` | Viewed Product e-commerce mapping |
+| `AddedToCart.json` | Added to Cart e-commerce mapping |
+| `StartedCheckout.json` | Started Checkout e-commerce mapping |
+| `Items.json` | Product items array mapping |
diff --git a/src/v0/destinations/klaviyo/docs/retl.md b/src/v0/destinations/klaviyo/docs/retl.md
new file mode 100644
index 00000000000..e5db782a756
--- /dev/null
+++ b/src/v0/destinations/klaviyo/docs/retl.md
@@ -0,0 +1,212 @@
+# Klaviyo RETL Functionality
+
+## Is RETL supported at all?
+
+**RETL (Reverse ETL) Support**: **Yes**
+
+The Klaviyo destination supports RETL functionality. Evidence:
+
+- `supportedSourceTypes` includes `warehouse` which indicates RETL support
+- JSON mapper is supported by default (no `disableJsonMapper: true` in config)
+- `supportsVisualMapper: true` indicates VDM v1 support
+- Supports data flow from warehouses/databases to Klaviyo
+
+## RETL Support Analysis
+
+### Which type of RETL support does it have?
+
+- **JSON Mapper**: Supported (default, no `disableJsonMapper: true`)
+- **VDM V1**: Supported (`supportsVisualMapper: true` in `db-config.json`)
+- **VDM V2**: Not supported (no `record` in `supportedMessageTypes`)
+
+### Does it have VDM support?
+
+**Yes** - `supportsVisualMapper: true` is present in `db-config.json`, confirming VDM V1 support.
+
+### Does it have VDM V2 support?
+
+**No** - Missing both:
+
+- `supportedMessageTypes > record` in `db-config.json`
+- Record event type handling in transformer code
+
+### Connection Config
+
+Standard Klaviyo configuration applies:
+
+- **Private API Key**: Klaviyo private API key for authentication
+- **Public API Key**: Klaviyo public API key
+- **API Version**: v1 or v2 (v2 recommended)
+- **List ID**: Default list for subscriptions
+- **Consent**: Email and/or SMS consent channels
+- **Flatten Properties**: Option to flatten nested properties
+
+## RETL Flow Implementation
+
+### Warehouse Integration
+
+Klaviyo supports RETL through warehouse sources with both JSON mapper and VDM v1 functionality:
+
+- **Supported**: Yes, warehouse sources can send data to Klaviyo via RETL
+- **Connection Mode**: Cloud mode only
+- **Message Types**: group, identify, screen, track
+- **Data Flow**: Warehouse/Database → RudderStack → Klaviyo (via REST API)
+- **Mapping**: JSON mapper and VDM v1 transform warehouse data to Klaviyo format
+
+### Supported Message Types for RETL
+
+```json
+"supportedMessageTypes": {
+ "cloud": ["group", "identify", "screen", "track"]
+}
+```
+
+### RETL Event Processing
+
+The Klaviyo destination processes RETL events similarly to event stream events, with the following considerations:
+
+#### Key RETL-Specific Behaviors
+
+1. **Profile Creation/Update**:
+
+ - RETL identify events create or update profiles in Klaviyo
+ - Uses the same two-step process (create → update) as event stream
+ - Profile deduplication handled via 409 Conflict response
+
+2. **List Subscriptions**:
+
+ - Group events can subscribe/unsubscribe profiles from lists
+ - Requires `groupId` (as list ID) and `subscribe` trait
+
+3. **Event Tracking**:
+
+ - Track events from RETL sources create events in Klaviyo
+ - Supports e-commerce event mapping
+
+4. **Mapped to Destination Support**:
+ - **NEEDS REVIEW**: The Klaviyo transformer may support `context.mappedToDestination` flag for pre-formatted data
+ - When enabled, traits may be passed directly without standard mapping
+
+## Data Flow
+
+### RETL Data Processing
+
+1. **Data Extraction**: Warehouse/database data extracted by RudderStack
+2. **Mapping**: Data transformed using JSON mapper or VDM v1 configuration
+3. **Event Construction**: Warehouse records converted to Klaviyo events
+4. **API Delivery**: Events sent to Klaviyo via REST API endpoints
+
+### Example RETL Identify Event
+
+```json
+{
+ "type": "identify",
+ "userId": "user123",
+ "traits": {
+ "email": "user@example.com",
+ "firstName": "John",
+ "lastName": "Doe",
+ "phone": "+15551234567",
+ "subscribe": true,
+ "custom_attribute": "value"
+ },
+ "context": {
+ "externalId": [
+ {
+ "id": "klaviyo_profile_id",
+ "type": "klaviyo-profileId"
+ }
+ ]
+ }
+}
+```
+
+### Example RETL Track Event
+
+```json
+{
+ "type": "track",
+ "userId": "user123",
+ "event": "Order Completed",
+ "properties": {
+ "orderId": "order_123",
+ "total": 99.99,
+ "currency": "USD",
+ "products": [
+ {
+ "product_id": "prod_1",
+ "name": "Product Name",
+ "price": 49.99,
+ "quantity": 2
+ }
+ ]
+ }
+}
+```
+
+### Example RETL Group Event (Subscribe to List)
+
+```json
+{
+ "type": "group",
+ "userId": "user123",
+ "groupId": "LIST_ID_HERE",
+ "traits": {
+ "subscribe": true,
+ "email": "user@example.com"
+ }
+}
+```
+
+## Rate Limits and Constraints
+
+### Klaviyo API Limits
+
+- **REST API**: Standard Klaviyo API rate limits apply
+- **Batch Size**: 100 profiles per subscription batch
+- **Request Rate**: Per-account limits with burst and steady windows
+
+### RETL Processing Constraints
+
+- **Message Types**: Supports identify, track, screen, and group
+- **JSON Mapper and VDM v1**: Both supported for data transformation
+- **Cloud Mode Only**: Device mode not supported for RETL
+- **Profile Identifiers**: Email, phone (E.164), or external_id required
+
+## Batching for RETL
+
+### Subscription Batching
+
+RETL events that result in subscription operations are batched:
+
+- **Batch Size**: Up to 100 profiles per API call
+- **Grouping**: Subscriptions grouped by list ID
+- **Ordering**: Profile updates are processed before subscriptions
+
+### Profile Updates
+
+Profile update events are not batched together but maintain ordering for consistency.
+
+## Summary
+
+The Klaviyo destination supports RETL functionality through:
+
+- **RETL Support**: Yes, via warehouse source type support
+- **JSON Mapper**: Supported by default for data transformation
+- **VDM v1**: Supported (`supportsVisualMapper: true`)
+- **VDM v2**: Not supported (no `record` message type)
+- **Supported Events**: identify, track, screen, group
+- **API Integration**: Klaviyo REST API for data delivery
+
+**Key Features**:
+
+- Profile creation and updates from warehouse data
+- List subscription management via group events
+- E-commerce event tracking with automatic mapping
+- Batch subscription operations for efficiency
+
+**Limitations**:
+
+- No VDM v2 support (no record message type)
+- Cloud mode only for RETL functionality
+- No full profile deletion (only list unsubscription)
diff --git a/src/v0/destinations/klaviyo/transform.js b/src/v0/destinations/klaviyo/transform.js
index d73dbca6003..b8741e71d41 100644
--- a/src/v0/destinations/klaviyo/transform.js
+++ b/src/v0/destinations/klaviyo/transform.js
@@ -246,6 +246,7 @@ const trackRequestHandler = (message, category, destination) => {
payload.data.attributes = attributes;
const response = defaultRequestConfig();
response.endpoint = `${BASE_ENDPOINT}${category.apiUrl}`;
+ response.endpointPath = category.apiUrl;
response.method = defaultPostRequestConfig.requestMethod;
response.headers = {
Authorization: `Klaviyo-API-Key ${privateApiKey}`,
diff --git a/src/v0/destinations/klaviyo/util.js b/src/v0/destinations/klaviyo/util.js
index 8eb7660aea7..a8b864717d4 100644
--- a/src/v0/destinations/klaviyo/util.js
+++ b/src/v0/destinations/klaviyo/util.js
@@ -129,6 +129,7 @@ const profileUpdateResponseBuilder = (payload, profileId, category, privateApiKe
const identifyResponse = defaultRequestConfig();
updatedPayload.data.id = profileId;
identifyResponse.endpoint = `${BASE_ENDPOINT}${category.apiUrl}/${profileId}`;
+ identifyResponse.endpointPath = category.apiUrl;
identifyResponse.method = defaultPatchRequestConfig.requestMethod;
identifyResponse.headers = {
Authorization: `Klaviyo-API-Key ${privateApiKey}`,
@@ -190,6 +191,7 @@ const subscribeUserToList = (message, traitsInfo, destination) => {
const response = defaultRequestConfig();
response.method = defaultPostRequestConfig.requestMethod;
response.endpoint = targetUrl;
+ response.endpointPath = '/api/profile-subscription-bulk-create-jobs';
response.headers = {
Authorization: `Klaviyo-API-Key ${privateApiKey}`,
'Content-Type': JSON_MIME_TYPE,
@@ -261,6 +263,7 @@ const generateBatchedPaylaodForArray = (events) => {
const BATCH_ENDPOINT = `${BASE_ENDPOINT}/api/profile-subscription-bulk-create-jobs`;
batchEventResponse.batchedRequest[0].endpoint = BATCH_ENDPOINT;
+ batchEventResponse.batchedRequest[0].endpointPath = '/api/profile-subscription-bulk-create-jobs';
batchEventResponse.batchedRequest[0].headers = {
Authorization: `Klaviyo-API-Key ${destination.Config.privateApiKey}`,
@@ -340,6 +343,7 @@ const buildRequest = (payload, destination, category) => {
const response = defaultRequestConfig();
response.endpoint = `${BASE_ENDPOINT}${category.apiUrl}`;
+ response.endpointPath = category.apiUrl;
response.method = defaultPostRequestConfig.requestMethod;
response.headers = {
Authorization: `Klaviyo-API-Key ${privateApiKey}`,
@@ -584,7 +588,9 @@ const getSubscriptionPayload = (listId, profiles, operation) => ({
const buildSubscriptionOrUnsubscriptionPayload = (subscription, destination) => {
const response = defaultRequestConfig();
const { privateApiKey } = destination.Config;
- response.endpoint = `${BASE_ENDPOINT}${CONFIG_CATEGORIES[subscription.operation.toUpperCase()].apiUrl}`;
+ const { apiUrl } = CONFIG_CATEGORIES[subscription.operation.toUpperCase()];
+ response.endpoint = `${BASE_ENDPOINT}${apiUrl}`;
+ response.endpointPath = apiUrl;
response.method = defaultPostRequestConfig.requestMethod;
response.headers = {
Authorization: `Klaviyo-API-Key ${privateApiKey}`,
diff --git a/src/v0/destinations/marketo/config.js b/src/v0/destinations/marketo/config.js
index 604be41dc07..4a1bffa426b 100644
--- a/src/v0/destinations/marketo/config.js
+++ b/src/v0/destinations/marketo/config.js
@@ -27,9 +27,6 @@ const formatConfig = (destination) => ({
? getHashFromArray(destination.Config.rudderEventsMapping, 'event', 'marketoPrimarykey', false)
: getHashFromArray(destination.Config.customActivityPrimaryKeyMap, 'from', 'to', false),
leadTraitMapping: getHashFromArray(destination.Config.leadTraitMapping, 'from', 'to', false),
- responseRules: destination.DestinationDefinition
- ? destination.DestinationDefinition.ResponseRules
- : null,
});
module.exports = {
diff --git a/src/v0/destinations/marketo/util.js b/src/v0/destinations/marketo/util.js
index 1f6c259f56c..321fa15c8b5 100644
--- a/src/v0/destinations/marketo/util.js
+++ b/src/v0/destinations/marketo/util.js
@@ -236,7 +236,12 @@ const marketoResponseHandler = (
if (response.errors.length > 0 && response.errors[0].message) {
message += ` -> ${response.errors[0].message}`;
}
- // Marketo sent us some failure which is not handled
+ logger.error('Marketo sent us some failure which is not handled', {
+ status,
+ responseErrors: JSON.stringify(response.errors),
+ responseSuccess: response.success,
+ errorMessage: message,
+ });
throw new UnhandledStatusCodeError(message, destResponse);
};
diff --git a/src/v0/destinations/posthog/config.js b/src/v0/destinations/posthog/config.ts
similarity index 72%
rename from src/v0/destinations/posthog/config.js
rename to src/v0/destinations/posthog/config.ts
index 6fae577ce8a..aa457f2950b 100644
--- a/src/v0/destinations/posthog/config.js
+++ b/src/v0/destinations/posthog/config.ts
@@ -1,4 +1,4 @@
-const { getMappingConfig } = require('../../util');
+import { getMappingConfig } from '../../util';
const DEFAULT_BASE_ENDPOINT = 'https://app.posthog.com';
@@ -34,15 +34,12 @@ const CONFIG_CATEGORIES = {
type: 'screen',
event: '$screen',
},
- PROPERTY: {
- name: 'PHPropertiesConfig',
- },
};
-const MAPPING_CONFIG = getMappingConfig(CONFIG_CATEGORIES, __dirname);
-
-module.exports = {
- DEFAULT_BASE_ENDPOINT,
- CONFIG_CATEGORIES,
- MAPPING_CONFIG,
+const PROPERTY = {
+ name: 'PHPropertiesConfig',
};
+
+const MAPPING_CONFIG = getMappingConfig({ ...CONFIG_CATEGORIES, PROPERTY }, __dirname);
+
+export { DEFAULT_BASE_ENDPOINT, CONFIG_CATEGORIES, MAPPING_CONFIG, PROPERTY };
diff --git a/src/v0/destinations/posthog/transform.js b/src/v0/destinations/posthog/transform.ts
similarity index 73%
rename from src/v0/destinations/posthog/transform.js
rename to src/v0/destinations/posthog/transform.ts
index 62a2f267834..3734f832b02 100644
--- a/src/v0/destinations/posthog/transform.js
+++ b/src/v0/destinations/posthog/transform.ts
@@ -1,8 +1,12 @@
-const get = require('get-value');
-const { InstrumentationError, TransformationError } = require('@rudderstack/integrations-lib');
-const { EventType } = require('../../../constants');
-const { DEFAULT_BASE_ENDPOINT, CONFIG_CATEGORIES, MAPPING_CONFIG } = require('./config');
-const {
+import get from 'get-value';
+import {
+ InstrumentationError,
+ TransformationError,
+ isDefinedAndNotNull,
+} from '@rudderstack/integrations-lib';
+import { EventType } from '../../../constants';
+import { DEFAULT_BASE_ENDPOINT, CONFIG_CATEGORIES, MAPPING_CONFIG, PROPERTY } from './config';
+import {
defaultRequestConfig,
getBrowserInfo,
getDeviceModel,
@@ -11,18 +15,24 @@ const {
ErrorMessage,
isValidUrl,
stripTrailingSlash,
- isDefinedAndNotNull,
removeUndefinedAndNullValues,
simpleProcessRouterDest,
-} = require('../../util');
-const { JSON_MIME_TYPE } = require('../../util/constant');
+} from '../../util';
+import { JSON_MIME_TYPE } from '../../util/constant';
+import type { RudderMessage } from '../../../types';
+import type {
+ PostHogCategory,
+ PostHogDestination,
+ PostHogMessage,
+ PostHogPayload,
+ PostHogProcessorRequest,
+ PostHogResponseBody,
+ PostHogRouterRequest,
+} from './types';
// Logic To match destination Property key that is in Rudder Stack Properties Object.
-const generatePropertyDefination = (message) => {
- const PHPropertyJson = CONFIG_CATEGORIES.PROPERTY.name;
- const propertyJson = MAPPING_CONFIG[PHPropertyJson];
- let data = {};
-
+const generatePropertyDefination = (message: PostHogMessage) => {
+ const propertyJson = MAPPING_CONFIG[PROPERTY.name];
// Filter out property specific to mobile or web. isMobile key takes care of it.
// Array Filter() will map propeerty on basis of given condition and filters it.
// if (message.channel === "mobile") {
@@ -35,7 +45,7 @@ const generatePropertyDefination = (message) => {
// });
// }
- data = constructPayload(message, propertyJson);
+ let data = constructPayload(message, propertyJson)!;
// This logic ensures to get browser info only for payload generated from web.
if (message.channel === 'web' && message.context && message.context.userAgent) {
@@ -70,9 +80,13 @@ const generatePropertyDefination = (message) => {
return removeUndefinedAndNullValues(data);
};
-const responseBuilderSimple = (message, category, destination) => {
+const responseBuilderSimple = (
+ message: RudderMessage,
+ category: PostHogCategory,
+ destination: PostHogDestination,
+) => {
// This is to ensure backward compatibility of group calls.
- let payload;
+ let payload: PostHogPayload | null;
if (category.type === 'group' && destination.Config.useV2Group) {
payload = constructPayload(message, MAPPING_CONFIG[CONFIG_CATEGORIES.GROUPV2.name]);
} else {
@@ -123,7 +137,7 @@ const responseBuilderSimple = (message, category, destination) => {
payload.event = category.event;
}
- const responseBody = {
+ const responseBody: PostHogResponseBody = {
...payload,
api_key: destination.Config.teamApiKey,
type: category.type,
@@ -140,24 +154,30 @@ const responseBuilderSimple = (message, category, destination) => {
return response;
};
-const processEvent = (message, destination) => {
+const isValidCategoryKey = (key: string): key is keyof typeof CONFIG_CATEGORIES =>
+ key in CONFIG_CATEGORIES;
+
+const processEvent = (message: RudderMessage, destination: PostHogDestination) => {
if (!message.type) {
throw new InstrumentationError('Event type is required');
}
- const category = CONFIG_CATEGORIES[message.type.toUpperCase()];
- if (!category) {
+ const key = message.type.toUpperCase();
+ if (!isValidCategoryKey(key)) {
throw new InstrumentationError(`Event type ${message.type} is not supported`);
}
- return responseBuilderSimple(message, category, destination);
+ return responseBuilderSimple(message, CONFIG_CATEGORIES[key], destination);
};
-const process = (event) => processEvent(event.message, event.destination);
+const process = (event: PostHogProcessorRequest) => processEvent(event.message, event.destination);
-const processRouterDest = async (inputs, reqMetadata) => {
- const respList = await simpleProcessRouterDest(inputs, process, reqMetadata);
+const processRouterDest = async (
+ inputs: PostHogRouterRequest[],
+ reqMetadata: Record<string, unknown>,
+) => {
+ const respList = await simpleProcessRouterDest(inputs, process, reqMetadata, undefined);
return respList;
};
-module.exports = { process, processRouterDest };
+export { process, processRouterDest };
diff --git a/src/v0/destinations/posthog/types.ts b/src/v0/destinations/posthog/types.ts
new file mode 100644
index 00000000000..373915a10cd
--- /dev/null
+++ b/src/v0/destinations/posthog/types.ts
@@ -0,0 +1,77 @@
+import type { RudderMessage, Metadata } from '../../../types';
+import type { Destination } from '../../../types/controlPlaneConfig';
+import type {
+ ProcessorTransformationRequest,
+ RouterTransformationRequestData,
+} from '../../../types/destinationTransformation';
+
+export type PostHogDestinationConfig = {
+ useV2Group?: boolean;
+ teamApiKey: string;
+ yourInstance?: string;
+};
+
+// Properties directly accessed in transform.ts; remaining PHPropertiesConfig fields
+// are populated implicitly by constructPayload and covered by the index signature.
+export type PostHogProperties = {
+ $os?: string;
+ $current_url?: string;
+ distinct_id?: string;
+ $browser?: string;
+ $browser_version?: string;
+ $screen_name?: string;
+ $host?: string;
+ $set?: Record<string, unknown>;
+ timestamp?: string;
+ $group_set?: Record<string, unknown>;
+ $group_type?: string;
+ $group_key?: string;
+ $groups?: Record<string, unknown>;
+ [key: string]: unknown;
+};
+
+export type PostHogResponseBody = {
+ distinct_id?: string;
+ event?: string;
+ timestamp?: string;
+ properties?: PostHogProperties;
+ api_key: string;
+ type: string;
+ [key: string]: unknown;
+};
+
+// Payload shape before api_key and type are added
+export type PostHogPayload = {
+ distinct_id?: string;
+ event?: string;
+ timestamp?: string;
+ properties?: PostHogProperties;
+};
+
+export type PostHogCategory = {
+ name: string;
+ type: string;
+ event?: string;
+};
+
+export type PostHogDestination = Destination<PostHogDestinationConfig>;
+
+export interface PostHogMessage extends RudderMessage {
+ context?: {
+ userAgent?: string;
+ traits?: Record<string, unknown>;
+ };
+}
+
+export type PostHogProcessorRequest = ProcessorTransformationRequest<
+ PostHogMessage,
+ Metadata,
+ PostHogDestination
+>;
+
+export type PostHogRouterRequest = RouterTransformationRequestData<
+ PostHogMessage,
+ PostHogDestination,
+ undefined,
+ Metadata
+>;
diff --git a/src/v0/destinations/salesforce/config.js b/src/v0/destinations/salesforce/config.js
index 2e2697835c0..06a7863b95e 100644
--- a/src/v0/destinations/salesforce/config.js
+++ b/src/v0/destinations/salesforce/config.js
@@ -28,6 +28,7 @@ const SF_TOKEN_REQUEST_URL_SANDBOX = 'https://test.salesforce.com/services/oauth
const DESTINATION = 'Salesforce';
const SALESFORCE_OAUTH_SANDBOX = 'salesforce_oauth_sandbox';
+const SALESFORCE_OAUTH = 'SALESFORCE_OAUTH';
const OAUTH = 'oauth';
const LEGACY = 'legacy';
@@ -48,4 +49,5 @@ module.exports = {
OAUTH,
LEGACY,
SALESFORCE_OAUTH_SANDBOX,
+ SALESFORCE_OAUTH,
};
diff --git a/src/v0/destinations/salesforce/transform.js b/src/v0/destinations/salesforce/transform.js
index 2b27174667e..9e11a72df6c 100644
--- a/src/v0/destinations/salesforce/transform.js
+++ b/src/v0/destinations/salesforce/transform.js
@@ -29,7 +29,7 @@ const {
getAuthHeader,
getSalesforceIdForRecord,
getSalesforceIdForLead,
- isWorkspaceSupportedForSoql,
+ isWorkspaceAndDestTypeSupportedForSoql,
} = require('./utils');
const { JSON_MIME_TYPE } = require('../../util/constant');
// Basic response builder
@@ -259,7 +259,13 @@ async function process(event) {
const authInfo = await collectAuthorizationInfo(event);
let salesforceSdk;
- if (isWorkspaceSupportedForSoql(event?.metadata?.workspaceId ?? '')) {
+ const { destination, metadata } = event;
+ if (
+ isWorkspaceAndDestTypeSupportedForSoql(
+ destination.DestinationDefinition?.Name ?? '',
+ metadata?.workspaceId ?? '',
+ )
+ ) {
const { token, instanceUrl } = authInfo.authorizationData;
salesforceSdk = new SalesforceSDK.Salesforce({
accessToken: token,
@@ -291,8 +297,9 @@ const processRouterDest = async (inputs, reqMetadata) => {
}
try {
- const metadata = inputs?.[0]?.metadata;
- if (isWorkspaceSupportedForSoql(metadata?.workspaceId ?? '')) {
+ const workspaceId = inputs?.[0]?.metadata?.workspaceId ?? '';
+ const destinationDefinitionName = inputs?.[0]?.destination?.DestinationDefinition?.Name ?? '';
+ if (isWorkspaceAndDestTypeSupportedForSoql(destinationDefinitionName, workspaceId)) {
const { token, instanceUrl } = authInfo.authorizationData;
salesforceSdk = new SalesforceSDK.Salesforce({
diff --git a/src/v0/destinations/salesforce/utils.js b/src/v0/destinations/salesforce/utils.js
index 0501cd4e2dc..20a19b55fb4 100644
--- a/src/v0/destinations/salesforce/utils.js
+++ b/src/v0/destinations/salesforce/utils.js
@@ -5,6 +5,7 @@ const {
OAuthSecretError,
isDefinedAndNotNullAndNotEmpty,
NetworkInstrumentationError,
+ InstrumentationError,
} = require('@rudderstack/integrations-lib');
const { handleHttpRequest } = require('../../../adapters/network');
const {
@@ -13,7 +14,6 @@ const {
isDefinedAndNotNull,
} = require('../../util');
const Cache = require('../../util/cache');
-const stats = require('../../../util/stats');
const {
CLIENT_ID,
CLIENT_SECRET,
@@ -25,6 +25,7 @@ const {
OAUTH,
SALESFORCE_OAUTH_SANDBOX,
SF_API_VERSION,
+ SALESFORCE_OAUTH,
} = require('./config');
const { REFRESH_TOKEN } = require('../../../adapters/networkhandler/authConstants');
@@ -246,11 +247,25 @@ const getAuthHeader = (authInfo) => {
: { Authorization: authorizationData.token };
};
-const isWorkspaceSupportedForSoql = (workspaceId) => {
- const soqlSupportedWorkspaceIds = process.env.DEST_SALESFORCE_SOQL_SUPPORTED_WORKSPACE_IDS?.split(
- ',',
- )?.map?.((s) => s?.trim?.());
- return soqlSupportedWorkspaceIds?.includes(workspaceId) ?? false;
+const isWorkspaceAndDestTypeSupportedForSoql = (
+ destinationDefinitionName = '',
+ workspaceId = '',
+) => {
+ const upperCaseName = destinationDefinitionName?.toUpperCase?.() ?? '';
+ if (upperCaseName !== SALESFORCE_OAUTH) {
+ return false;
+ }
+
+ const parseIdList = (envVar) =>
+ envVar
+ ?.split(',')
+ ?.map((s) => s?.trim())
+ ?.filter((s) => s) ?? [];
+
+ const normalizedWorkspaceId = workspaceId?.trim();
+
+ const skipList = parseIdList(process.env.DEST_SALESFORCE_SOQL_SKIP_WORKSPACE_IDS);
+ return !skipList.includes(normalizedWorkspaceId);
};
/**
@@ -305,6 +320,25 @@ async function getSalesforceIdForRecordUsingHttp(
return searchRecord?.Id;
}
+const SOQL_FIELD_NAME_REGEX = /^[A-Z_a-z]\w*$/;
+
+/**
+ * Escapes a value for safe interpolation into a SOQL query string.
+ * Numeric values are returned as-is; all other values are wrapped in single quotes
+ * with internal backslashes and single quotes escaped.
+ * @param {*} value
+ * @returns {string|number}
+ */
+function soqlEscapeValue(value) {
+ if (typeof value === 'number' && Number.isFinite(value)) {
+ return value;
+ }
+ const escaped = String(value)
+ .replaceAll('\\', String.raw`\\`)
+ .replaceAll("'", String.raw`\'`);
+ return `'${escaped}'`;
+}
+
/**
* Get the Salesforce ID for a record using the Salesforce SDK
* @param {SalesforceSDK} salesforceSdk The Salesforce SDK instance.
@@ -319,10 +353,13 @@ async function getSalesforceIdForRecordUsingSdk(
identifierType,
identifierValue,
) {
+ if (!SOQL_FIELD_NAME_REGEX.test(identifierType)) {
+ throw new InstrumentationError(`Invalid identifierType for SOQL query: ${identifierType}`);
+ }
let queryResponse;
try {
queryResponse = await salesforceSdk.query(
- `SELECT Id FROM ${objectType} WHERE ${identifierType} = '${identifierValue}'`,
+ `SELECT Id FROM ${objectType} WHERE ${identifierType} = ${soqlEscapeValue(identifierValue)}`,
);
} catch (error) {
// check if the error message contains 'session expired'
@@ -377,12 +414,12 @@ async function getSalesforceIdForRecord({
metadata,
stateInfo,
}) {
- if (isWorkspaceSupportedForSoql(metadata?.workspaceId ?? '')) {
- stats.increment('salesforce_soql_lookup_count', {
- method: 'getSalesforceIdForRecordUsingSdk',
- workspaceId: metadata?.workspaceId ?? '',
- objectType,
- });
+ if (
+ isWorkspaceAndDestTypeSupportedForSoql(
+ destination.DestinationDefinition?.Name ?? '',
+ metadata?.workspaceId ?? '',
+ )
+ ) {
return getSalesforceIdForRecordUsingSdk(
stateInfo.salesforceSdk,
objectType,
@@ -547,12 +584,12 @@ async function getSalesforceIdForLeadUsingHttp(email, destination, authInfo, met
* @returns {Promise<{ salesforceType: string, salesforceId: string }>} The Salesforce type and ID for the lead.
*/
async function getSalesforceIdForLead({ email, destination, metadata, stateInfo }) {
- if (isWorkspaceSupportedForSoql(metadata?.workspaceId ?? '')) {
- stats.increment('salesforce_soql_lookup_count', {
- method: 'getSalesforceIdForLeadUsingSdk',
- workspaceId: metadata?.workspaceId ?? '',
- objectType: 'Lead',
- });
+ if (
+ isWorkspaceAndDestTypeSupportedForSoql(
+ destination?.DestinationDefinition?.Name ?? '',
+ metadata?.workspaceId ?? '',
+ )
+ ) {
return getSalesforceIdForLeadUsingSdk(stateInfo.salesforceSdk, email, destination);
}
return getSalesforceIdForLeadUsingHttp(email, destination, stateInfo.authInfo, metadata);
@@ -570,5 +607,5 @@ module.exports = {
getSalesforceIdForLead,
getSalesforceIdForLeadUsingHttp,
getSalesforceIdForLeadUsingSdk,
- isWorkspaceSupportedForSoql,
+ isWorkspaceAndDestTypeSupportedForSoql,
};
diff --git a/src/v0/destinations/salesforce/utils.test.js b/src/v0/destinations/salesforce/utils.test.js
index 327fc1faade..5bc816d6286 100644
--- a/src/v0/destinations/salesforce/utils.test.js
+++ b/src/v0/destinations/salesforce/utils.test.js
@@ -3,12 +3,13 @@ const {
RetryableError,
ThrottledError,
AbortedError,
+ InstrumentationError,
} = require('@rudderstack/integrations-lib');
const { handleHttpRequest } = require('../../../adapters/network');
const { isHttpStatusSuccess } = require('../../util');
const { REFRESH_TOKEN } = require('../../../adapters/networkhandler/authConstants');
const {
- isWorkspaceSupportedForSoql,
+ isWorkspaceAndDestTypeSupportedForSoql,
getSalesforceIdForRecordUsingHttp,
getSalesforceIdForRecordUsingSdk,
getSalesforceIdForRecord,
@@ -31,47 +32,55 @@ describe('Salesforce Utils', () => {
beforeEach(() => {
jest.clearAllMocks();
process.env = { ...originalEnv };
- delete process.env.DEST_SALESFORCE_SOQL_SUPPORTED_WORKSPACE_IDS;
+ delete process.env.DEST_SALESFORCE_SOQL_SKIP_WORKSPACE_IDS;
});
afterEach(() => {
process.env = originalEnv;
});
- describe('isWorkspaceSupportedForSoql', () => {
- it('should return true when workspace ID is in the supported list', () => {
- process.env.DEST_SALESFORCE_SOQL_SUPPORTED_WORKSPACE_IDS = 'ws1,ws2,ws3';
- expect(isWorkspaceSupportedForSoql('ws2')).toBe(true);
+ describe('isWorkspaceAndDestTypeSupportedForSoql', () => {
+ it('should return false for non-SALESFORCE_OAUTH destination types', () => {
+ expect(isWorkspaceAndDestTypeSupportedForSoql('SALESFORCE', 'ws1')).toBe(false);
+ expect(isWorkspaceAndDestTypeSupportedForSoql('SALESFORCE', '')).toBe(false);
+ expect(isWorkspaceAndDestTypeSupportedForSoql(undefined, 'ws1')).toBe(false);
});
- it('should return false when workspace ID is not in the supported list', () => {
- process.env.DEST_SALESFORCE_SOQL_SUPPORTED_WORKSPACE_IDS = 'ws1,ws2,ws3';
- expect(isWorkspaceSupportedForSoql('ws4')).toBe(false);
+ it('should return true for SALESFORCE_OAUTH workspaces by default', () => {
+ expect(isWorkspaceAndDestTypeSupportedForSoql('SALESFORCE_OAUTH', 'ws1')).toBe(true);
+ expect(isWorkspaceAndDestTypeSupportedForSoql('SALESFORCE_OAUTH', 'ws2')).toBe(true);
+ expect(isWorkspaceAndDestTypeSupportedForSoql('SALESFORCE_OAUTH', 'any-workspace')).toBe(
+ true,
+ );
});
- it('should return false when environment variable is not set', () => {
- delete process.env.DEST_SALESFORCE_SOQL_SUPPORTED_WORKSPACE_IDS;
- expect(isWorkspaceSupportedForSoql('ws1')).toBe(false);
+ it('should return true when skip list is not set', () => {
+ delete process.env.DEST_SALESFORCE_SOQL_SKIP_WORKSPACE_IDS;
+ expect(isWorkspaceAndDestTypeSupportedForSoql('SALESFORCE_OAUTH', 'ws1')).toBe(true);
});
- it('should return false when environment variable is empty', () => {
- process.env.DEST_SALESFORCE_SOQL_SUPPORTED_WORKSPACE_IDS = '';
- expect(isWorkspaceSupportedForSoql('ws1')).toBe(false);
+ it('should return true when skip list is empty', () => {
+ process.env.DEST_SALESFORCE_SOQL_SKIP_WORKSPACE_IDS = '';
+ expect(isWorkspaceAndDestTypeSupportedForSoql('SALESFORCE_OAUTH', 'ws1')).toBe(true);
});
- it('should handle workspace IDs with spaces in the list', () => {
- process.env.DEST_SALESFORCE_SOQL_SUPPORTED_WORKSPACE_IDS = 'ws1, ws2 , ws3';
- expect(isWorkspaceSupportedForSoql('ws2')).toBe(true);
- });
+ describe('skip list (DEST_SALESFORCE_SOQL_SKIP_WORKSPACE_IDS)', () => {
+ it('should return false when workspaceId is in the skip list', () => {
+ process.env.DEST_SALESFORCE_SOQL_SKIP_WORKSPACE_IDS = 'ws1,ws2';
+ expect(isWorkspaceAndDestTypeSupportedForSoql('SALESFORCE_OAUTH', 'ws1')).toBe(false);
+ expect(isWorkspaceAndDestTypeSupportedForSoql('SALESFORCE_OAUTH', 'ws2')).toBe(false);
+ });
- it('should return false for empty workspace ID', () => {
- process.env.DEST_SALESFORCE_SOQL_SUPPORTED_WORKSPACE_IDS = 'ws1,ws2';
- expect(isWorkspaceSupportedForSoql('')).toBe(false);
- });
+ it('should trim spaces in skip list entries', () => {
+ process.env.DEST_SALESFORCE_SOQL_SKIP_WORKSPACE_IDS = 'ws1, ws2 , ws3';
+ expect(isWorkspaceAndDestTypeSupportedForSoql('SALESFORCE_OAUTH', 'ws2')).toBe(false);
+ });
- it('should return false for undefined workspace ID', () => {
- process.env.DEST_SALESFORCE_SOQL_SUPPORTED_WORKSPACE_IDS = 'ws1,ws2';
- expect(isWorkspaceSupportedForSoql(undefined)).toBe(false);
+ it('should return true for workspaceIds not in the skip list', () => {
+ process.env.DEST_SALESFORCE_SOQL_SKIP_WORKSPACE_IDS = 'ws1';
+ expect(isWorkspaceAndDestTypeSupportedForSoql('SALESFORCE_OAUTH', 'ws2')).toBe(true);
+ expect(isWorkspaceAndDestTypeSupportedForSoql('SALESFORCE_OAUTH', 'ws3')).toBe(true);
+ });
});
});
@@ -580,17 +589,72 @@ describe('Salesforce Utils', () => {
mockSalesforceSdk,
'Account',
'External_ID__c',
- "test'value",
+ 'test value',
);
expect(mockSalesforceSdk.query).toHaveBeenCalledWith(
- "SELECT Id FROM Account WHERE External_ID__c = 'test'value'",
+ "SELECT Id FROM Account WHERE External_ID__c = 'test value'",
+ );
+ });
+
+ it('should throw InstrumentationError for invalid identifierType', async () => {
+ await expect(
+ getSalesforceIdForRecordUsingSdk(
+ mockSalesforceSdk,
+ 'Account',
+ "Field'; DROP TABLE Account--",
+ 'value',
+ ),
+ ).rejects.toThrow(InstrumentationError);
+ });
+
+ it('should not remove quote from numeric identifierValue', async () => {
+ mockSalesforceSdk.query.mockResolvedValueOnce({
+ totalSize: 1,
+ records: [{ Id: '0011234567890ABC' }],
+ });
+
+ await getSalesforceIdForRecordUsingSdk(mockSalesforceSdk, 'Account', 'Count__c', '42');
+
+ expect(mockSalesforceSdk.query).toHaveBeenCalledWith(
+ "SELECT Id FROM Account WHERE Count__c = '42'",
+ );
+ });
+
+ it('should not add quote to numeric identifierValue', async () => {
+ mockSalesforceSdk.query.mockResolvedValueOnce({
+ totalSize: 1,
+ records: [{ Id: '0011234567890ABC' }],
+ });
+
+ await getSalesforceIdForRecordUsingSdk(mockSalesforceSdk, 'Account', 'Count__c', 42);
+
+ expect(mockSalesforceSdk.query).toHaveBeenCalledWith(
+ 'SELECT Id FROM Account WHERE Count__c = 42',
+ );
+ });
+
+ it('should escape single quotes in string identifierValue', async () => {
+ mockSalesforceSdk.query.mockResolvedValueOnce({
+ totalSize: 1,
+ records: [{ Id: '0011234567890ABC' }],
+ });
+
+ await getSalesforceIdForRecordUsingSdk(mockSalesforceSdk, 'Account', 'Name', "O'Brien");
+
+ expect(mockSalesforceSdk.query).toHaveBeenCalledWith(
+ "SELECT Id FROM Account WHERE Name = 'O\\'Brien'",
);
});
});
describe('getSalesforceIdForRecord', () => {
- const mockDestination = { ID: 'dest-123' };
+ const mockDestination = {
+ ID: 'dest-123',
+ DestinationDefinition: {
+ Name: 'SALESFORCE_OAUTH',
+ },
+ };
const mockMetadata = { workspaceId: 'ws1' };
const mockSalesforceSdk = {
query: jest.fn(),
@@ -608,8 +672,7 @@ describe('Salesforce Utils', () => {
isHttpStatusSuccess.mockReturnValue(true);
});
- it('should use SDK when workspace is supported for SOQL', async () => {
- process.env.DEST_SALESFORCE_SOQL_SUPPORTED_WORKSPACE_IDS = 'ws1';
+ it('should use SDK by default for SALESFORCE_OAUTH', async () => {
mockSalesforceSdk.query.mockResolvedValueOnce({
totalSize: 1,
records: [{ Id: '0011234567890ABC' }],
@@ -634,8 +697,8 @@ describe('Salesforce Utils', () => {
expect(handleHttpRequest).not.toHaveBeenCalled();
});
- it('should use HTTP when workspace is not supported for SOQL', async () => {
- process.env.DEST_SALESFORCE_SOQL_SUPPORTED_WORKSPACE_IDS = 'ws2';
+ it('should use HTTP when workspace is in the skip list', async () => {
+ process.env.DEST_SALESFORCE_SOQL_SKIP_WORKSPACE_IDS = 'ws1';
handleHttpRequest.mockResolvedValueOnce({
processedResponse: {
response: {
@@ -664,15 +727,10 @@ describe('Salesforce Utils', () => {
expect(mockSalesforceSdk.query).not.toHaveBeenCalled();
});
- it('should use HTTP when workspace ID is undefined', async () => {
- delete process.env.DEST_SALESFORCE_SOQL_SUPPORTED_WORKSPACE_IDS;
- handleHttpRequest.mockResolvedValueOnce({
- processedResponse: {
- response: {
- searchRecords: [{ Id: '0011234567890ABC', External_ID__c: 'ext-123' }],
- },
- status: 200,
- },
+ it('should use SDK when workspace ID is undefined', async () => {
+ mockSalesforceSdk.query.mockResolvedValueOnce({
+ totalSize: 1,
+ records: [{ Id: '0011234567890ABC' }],
});
const stateInfo = {
@@ -690,7 +748,8 @@ describe('Salesforce Utils', () => {
});
expect(result).toBe('0011234567890ABC');
- expect(handleHttpRequest).toHaveBeenCalled();
+ expect(mockSalesforceSdk.query).toHaveBeenCalled();
+ expect(handleHttpRequest).not.toHaveBeenCalled();
});
});
@@ -1388,6 +1447,9 @@ describe('Salesforce Utils', () => {
Config: {
useContactId: false,
},
+ DestinationDefinition: {
+ Name: 'SALESFORCE_OAUTH',
+ },
};
const mockMetadata = { workspaceId: 'ws1' };
const mockSalesforceSdk = {
@@ -1406,8 +1468,7 @@ describe('Salesforce Utils', () => {
isHttpStatusSuccess.mockReturnValue(true);
});
- it('should use SDK when workspace is supported for SOQL', async () => {
- process.env.DEST_SALESFORCE_SOQL_SUPPORTED_WORKSPACE_IDS = 'ws1';
+ it('should use SDK by default for SALESFORCE_OAUTH', async () => {
mockSalesforceSdk.query.mockResolvedValueOnce({
totalSize: 1,
records: [
@@ -1440,8 +1501,8 @@ describe('Salesforce Utils', () => {
expect(handleHttpRequest).not.toHaveBeenCalled();
});
- it('should use HTTP when workspace is not supported for SOQL', async () => {
- process.env.DEST_SALESFORCE_SOQL_SUPPORTED_WORKSPACE_IDS = 'ws2';
+ it('should use HTTP when workspace is in the skip list', async () => {
+ process.env.DEST_SALESFORCE_SOQL_SKIP_WORKSPACE_IDS = 'ws1';
handleHttpRequest.mockResolvedValueOnce({
processedResponse: {
response: {
@@ -1478,22 +1539,17 @@ describe('Salesforce Utils', () => {
expect(mockSalesforceSdk.query).not.toHaveBeenCalled();
});
- it('should use HTTP when workspace ID is undefined', async () => {
- delete process.env.DEST_SALESFORCE_SOQL_SUPPORTED_WORKSPACE_IDS;
- handleHttpRequest.mockResolvedValueOnce({
- processedResponse: {
- response: {
- searchRecords: [
- {
- Id: '00Q1234567890ABC',
- IsConverted: false,
- ConvertedContactId: null,
- IsDeleted: false,
- },
- ],
+ it('should use SDK when workspace ID is undefined', async () => {
+ mockSalesforceSdk.query.mockResolvedValueOnce({
+ totalSize: 1,
+ records: [
+ {
+ Id: '00Q1234567890ABC',
+ IsConverted: false,
+ ConvertedContactId: null,
+ IsDeleted: false,
},
- status: 200,
- },
+ ],
});
const stateInfo = {
@@ -1512,7 +1568,8 @@ describe('Salesforce Utils', () => {
salesforceType: 'Lead',
salesforceId: '00Q1234567890ABC',
});
- expect(handleHttpRequest).toHaveBeenCalled();
+ expect(mockSalesforceSdk.query).toHaveBeenCalled();
+ expect(handleHttpRequest).not.toHaveBeenCalled();
});
});
});
diff --git a/src/v0/destinations/singular/README.md b/src/v0/destinations/singular/README.md
new file mode 100644
index 00000000000..995f9e4d817
--- /dev/null
+++ b/src/v0/destinations/singular/README.md
@@ -0,0 +1,373 @@
+# Singular Destination
+
+Implementation in **TypeScript** (v0)
+
+## Configuration
+
+### Required Settings
+
+- **API Key**: Singular SDK Key for API authentication (required)
+
+ - Retrieve from: Singular UI → Main Menu → [Developer Tools](https://app.singular.net/?#/react/sdk_integration)
+ - Do NOT use Reporting API Key - requests will be rejected
+ - Pattern: Alphanumeric with underscores (e.g., `sdkKey_afdadsf7asf56`)
+
+### Optional Settings
+
+- **API Secret**: Secret key for additional authentication
+
+ - Pattern: Alphanumeric only
+
+- **Session Event List**: Custom events to treat as session events
+
+ - These are in addition to default session events: `Application Installed`, `Application Updated`, `Application Opened`
+ - Default session events are matched **case-insensitively** (e.g., `application installed`, `Application Installed`, `APPLICATION INSTALLED` all work)
+ - Custom session events from `sessionEventList` are matched **case-sensitively** (exact match required)
+ - All session events are sent to the SESSION endpoint (`/api/v1/launch`) instead of the EVENT endpoint
+
+- **Match ID (Unity platforms)**: Match ID source for Unity/PC/Console platforms
+
+ - Options: `advertisingId` (default) or `hash` (maps to `properties.match_id`)
+ - Used for PC/Console game attribution where standard mobile device IDs are not available
+ - [PC & Console Game Integration Guide](https://support.singular.net/hc/en-us/articles/19147380039579-PC-Console-Game-Integration-Guide)
+
+- **Event Filtering**: Client-side event filtering for device-mode integrations
+
+ - Options: Disable, Allowlist (whitelistedEvents), Denylist (blacklistedEvents)
+
+- **Consent Management**: Support for OneTrust, Ketch, Iubenda, and Custom consent providers
+
+## Integration Functionalities
+
+> Singular supports **Device mode** and **Cloud mode** for mobile platforms (Android, iOS, React Native, Cordova)
+
+### Implementation Type
+
+- **v0 TypeScript Implementation**: Located at `src/v0/destinations/singular/`
+- **Not CDK v2**: Standard v0 destination implementation
+
+### Supported Message Types
+
+- **Track** (Cloud mode only)
+
+| Platform | Cloud Mode | Device Mode |
+| ------------ | ---------- | ----------------------- |
+| Android | track | identify, track, screen |
+| iOS | track | identify, track, screen |
+| React Native | track | identify, track, screen |
+| Cordova | track | identify, track, screen |
+| Web | track | - |
+| Unity | track | - |
+| Flutter | track | - |
+| Cloud | track | - |
+| Warehouse | track | - |
+
+### Batching Support
+
+- **Supported**: No
+- **Note**: Singular processes requests individually—no batch support in their API. Events are sent as individual GET requests.
+
+### Intermediate Calls
+
+- **Supported**: No
+- The Singular destination does not make any intermediate API calls. All events are transformed and sent directly to the appropriate endpoint.
+
+### Proxy Delivery
+
+- **Supported**: No
+- No custom `networkHandler.ts` file is present
+- Standard HTTP delivery is used
+
+### User Deletion
+
+- **Supported**: No
+- No `deleteUsers.ts` file is present in the destination
+
+### OAuth Support
+
+- **Supported**: No
+- Uses API Key authentication only
+
+### Processor vs Router Destination
+
+- **Type**: Processor Destination
+- `config.transformAtV1 == "processor"` in `db-config.json`
+
+### Partial Batching Response Handling
+
+- **Supported**: No
+- No custom networkHandler, standard single-event processing
+
+### Event Multiplexing
+
+- **Supported**: Yes (for revenue events with products array)
+- **Scenario**: When a non-session event contains a `properties.products` array, each product generates a separate revenue event
+- This multiplexing occurs to send individual revenue tracking for each product in an order
+
+```typescript
+// Multiplexing logic for products array
+if (!sessionEvent && Array.isArray(message?.properties?.products)) {
+ return generateRevenuePayloadArray(message.properties.products, payload, Config, {
+ endpoint,
+ endpointPath,
+ });
+}
+```
+
+### Supported Platforms
+
+| Platform | RudderStack Platform Mapping |
+| ------------------------------------- | ---------------------------- |
+| Android | `ANDROID` |
+| iOS (including iPadOS, watchOS, tvOS) | `IOS` |
+| PC | `unity` |
+| Xbox | `unity` |
+| PlayStation | `unity` |
+| Nintendo | `unity` |
+| MetaQuest | `unity` |
+
+#### Match ID for PC/Console Platforms
+
+For Unity/PC/Console platforms (PC, Xbox, PlayStation, Nintendo, MetaQuest), the `match_id` parameter is used for attribution since standard mobile advertising IDs (IDFA, AIFA) are not available.
+
+**Configuration Options**:
+
+- `advertisingId` (default): Uses `context.device.advertisingId` as the match_id value
+- `hash`: Uses `properties.match_id` from the event payload
+
+**Implementation Logic**:
+
+```typescript
+// If config is 'advertisingId' and advertisingId exists, use it
+if (Config.match_id === 'advertisingId' && message?.context?.device?.advertisingId) {
+ return { match_id: message?.context?.device?.advertisingId };
+}
+// Otherwise use properties.match_id if available
+if (message.properties?.match_id) {
+ return { match_id: message.properties.match_id };
+}
+```
+
+**Documentation**: [PC & Console Game Integration Guide](https://support.singular.net/hc/en-us/articles/19147380039579-PC-Console-Game-Integration-Guide)
+
+### API Endpoints
+
+| Endpoint | Version | Event Type | Description |
+| ---------------- | ------- | -------------- | ----------------------------------------------------------------------------- |
+| `/api/v1/launch` | V1 | Session Events | App lifecycle events (install, update, open) |
+| `/api/v1/evt` | V1 | Custom Events | Standard event tracking with platform device IDs |
+| `/api/v2/evt` | V2 | Custom Events | SDID-based event tracking (requires `integrations.Singular.singularDeviceId`) |
+
+#### V2 Event API Selection
+
+The V2 event API is used when `integrations.Singular.singularDeviceId` is present in the message:
+
+```typescript
+const shouldUseV2EventApi = (message: SingularMessage): boolean =>
+ getSingularDeviceIdFromMessage(message) !== undefined;
+```
+
+### Data Privacy Support
+
+- **Limit Data Sharing**: Supported via `integrations.Singular.limitDataSharing`
+ - Pass `true` to limit data sharing (user opted out)
+ - Pass `false` to allow data sharing (user opted in)
+
+```javascript
+// Example usage
+{
+ "integrations": {
+ "Singular": {
+ "singularDeviceId": "40009df0-d618-4d81-9da1-cbb3337b8dec", // Triggers V2 Event API
+ "limitDataSharing": false // Privacy consent (true = opted out, false = opted in)
+ }
+ }
+}
+```
+
+## Validations
+
+### Required Fields
+
+| Field | Event Type | Platform | Required |
+| ------------------------------------- | ---------- | -------- | -------- |
+| `event` | Track | All | Yes |
+| `context.os.name` | Track | All | Yes |
+| `context.app.namespace` | Track | All | Yes |
+| `context.ip` or `request_ip` | Track | All | Yes |
+| `context.os.version` | Track | All | Yes |
+| `context.device.attTrackingStatus` | Track | iOS | Yes |
+| `context.device.advertisingId` (idfa) | Track | iOS | Yes |
+| `context.device.id` (idfv) | Track | iOS | Yes |
+
+### Session Event Requirements (Additional)
+
+| Field | Platform | Required |
+| ----------------------------- | ------------ | -------- |
+| `context.app.version` | All | Yes |
+| `context.device.model` | Android, iOS | Yes |
+| `context.device.manufacturer` | Android, iOS | Yes |
+| `context.locale` | Android, iOS | Yes |
+| `context.app.build` | Android, iOS | Yes |
+| `properties.install` | Android, iOS | Yes |
+| `properties.install_receipt` | iOS | Yes |
+
+### Validation Errors
+
+- `Event type is required`: Missing `message.type`
+- `Event type {type} is not supported`: Only `track` events are supported in cloud mode
+- `Event name is not present for the event`: Missing `message.event`
+- `Platform name is missing from context.os.name`: Missing OS name
+- `Platform {platform} is not supported`: Unsupported platform
+
+## Rate Limits
+
+**NEEDS REVIEW**: Singular's S2S API documentation does not publicly specify explicit rate limits for the SESSION (`/api/v1/launch`) or EVENT (`/api/v1/evt`, `/api/v2/evt`) endpoints. Contact Singular support or your Customer Success Manager for specific rate limit information for your account.
+
+### Processing Constraints
+
+Based on the [S2S Integration Guide](https://support.singular.net/hc/en-us/articles/360037640812-Server-to-Server-Integration-Guide):
+
+- **Real-time Processing**: Requests are processed individually—no batch support
+- **No Bulk Import**: Events must be sent as individual GET requests
+- **Chronological Order**: Events must be sent in the order they occurred
+- **Session-First**: SESSION must be established before any EVENT calls
+
+### Error Handling
+
+- Refer to [S2S Response Codes & Error Handling](https://support.singular.net/hc/en-us/articles/31542603988379) for HTTP status codes and error responses
+
+## General Queries
+
+### Event Ordering
+
+#### Session Events (Application Installed, Updated, Opened)
+
+- **Required**: Yes (Critical)
+- Session events MUST be sent before any other events
+- Singular's attribution processing depends on receiving SESSION before EVENT calls
+- Invalid session order results in data inconsistencies and attribution errors
+
+#### Track Events
+
+- **Required**: Yes (Chronological)
+- Events must be sent in the order they occurred
+- Singular processes events based on timestamps (`utime` parameter)
+
+> **Critical**: SESSION must be established before any event tracking. Events must be sent chronologically.
+
+### Data Replay Feasibility
+
+#### Missing Data Replay
+
+- **Feasibility**: Limited
+- **SESSION Events**: Can be replayed, but may cause attribution issues if out of order
+- **EVENT Events**: Can be replayed with proper timestamps
+- **Constraint**: Singular does not deduplicate data—implement server-side deduplication to prevent duplicates
+
+#### Already Delivered Data Replay
+
+- **Not Recommended**
+- Singular does not deduplicate events—replaying will create duplicate records
+- **Impact**: Duplicate events may skew analytics and attribution metrics
+
+> **Important**: Singular explicitly states "No Deduplication: Singular does not deduplicate data—implement server-side deduplication to prevent duplicates" — [EVENT Endpoint API Reference](https://support.singular.net/hc/en-us/articles/31496864868635-Server-to-Server-EVENT-Endpoint-API-Reference)
+
+### Multiplexing
+
+- **Supported**: Yes (Revenue Events with Products)
+- When a track event contains `properties.products` array (non-session event), the destination generates multiple individual revenue events
+- Each product in the array results in a separate API call to the EVENT endpoint
+
+#### Multiplexing Scenarios
+
+1. **Track Events with Products Array**:
+
+ - Input: Track event with `properties.products` array
+ - Output: N API calls to `/api/v1/evt` or `/api/v2/evt` (one per product)
+ - Multiplexing: YES
+
+2. **Standard Track Events**:
+
+ - Input: Track event without products array
+ - Output: Single API call
+ - Multiplexing: NO
+
+3. **Session Events**:
+ - Input: Session event (Application Installed/Updated/Opened or custom)
+ - Output: Single API call to `/api/v1/launch`
+ - Multiplexing: NO (products array is ignored for session events)
+
+## Version Information
+
+### Current API Version
+
+- **SESSION Endpoint**: V1 (`/api/v1/launch`)
+- **EVENT Endpoint**: V1 (`/api/v1/evt`) and V2 (`/api/v2/evt`)
+
+### Version Selection
+
+| API Version | Endpoint | Use Case |
+| ----------- | ---------------- | -------------------------------------------------- |
+| V1 | `/api/v1/launch` | All session events |
+| V1 | `/api/v1/evt` | Events with platform device IDs (IDFA, AIFA, etc.) |
+| V2 | `/api/v2/evt` | Events with Singular Device ID (SDID) only |
+
+### Deprecation Information
+
+**NEEDS REVIEW**: No specific deprecation timeline found in Singular's documentation. The V1 and V2 APIs appear to be maintained in parallel for different use cases rather than as successor versions.
+
+## Documentation Links
+
+### Singular S2S API Documentation
+
+- [S2S Integration Guide](https://support.singular.net/hc/en-us/articles/360037640812-Server-to-Server-Integration-Guide)
+- [SESSION Endpoint API Reference](https://support.singular.net/hc/en-us/articles/31394799175963-Server-to-Server-SESSION-Endpoint-API-Reference)
+- [EVENT Endpoint API Reference](https://support.singular.net/hc/en-us/articles/31496864868635-Server-to-Server-EVENT-Endpoint-API-Reference)
+- [S2S Response Codes & Error Handling](https://support.singular.net/hc/en-us/articles/31542603988379)
+- [S2S Integration Testing Guide](https://support.singular.net/hc/en-us/articles/360002675072)
+- [Retrieving Device Data Guide](https://support.singular.net/hc/en-us/articles/30848622982299-Server-to-Server-Retrieving-Device-Data-Guide)
+- [PC & Console Game Integration Guide](https://support.singular.net/hc/en-us/articles/19147380039579-PC-Console-Game-Integration-Guide)
+
+### Standard Events Documentation
+
+- [Defining In-App Events](https://support.singular.net/hc/en-us/articles/360036736891)
+- [Standard Event Naming Convention](https://support.singular.net/hc/en-us/articles/7648172966299)
+
+## RETL Functionality
+
+For RETL (Reverse ETL) functionality, please refer to [docs/retl.md](docs/retl.md)
+
+## Business Logic and Mappings
+
+For business logic and mappings information, please refer to [docs/businesslogic.md](docs/businesslogic.md)
+
+## Source Code Structure
+
+```
+src/v0/destinations/singular/
+├── transform.ts # Main transformation logic
+├── config.ts # Configuration constants and mappings
+├── types.ts # TypeScript type definitions
+├── util.ts # Utility functions
+└── data/ # Mapping configuration files
+ ├── SINGULARAndroidEventConfig.json
+ ├── SINGULARAndroidSessionConfig.json
+ ├── SINGULARIosEventConfig.json
+ ├── SINGULARIosSessionConfig.json
+ ├── SINGULARUnityEventConfig.json
+ ├── SINGULARUnitySessionConfig.json
+ ├── SINGULAREventProductConfig.json
+ └── v2/
+ ├── SINGULARAndroidEventConfig.json
+ ├── SINGULARIosEventConfig.json
+ └── SINGULARUnityEventConfig.json
+```
+
+## Test Files
+
+```
+test/integrations/destinations/singular/
+├── processor/data.ts # Processor test cases
+└── router/data.ts # Router test cases
+```
diff --git a/src/v0/destinations/singular/config.js b/src/v0/destinations/singular/config.ts
similarity index 51%
rename from src/v0/destinations/singular/config.js
rename to src/v0/destinations/singular/config.ts
index 97824b809b0..f44e23c3349 100644
--- a/src/v0/destinations/singular/config.js
+++ b/src/v0/destinations/singular/config.ts
@@ -1,6 +1,12 @@
-const { getMappingConfig } = require('../../util');
+import { getMappingConfig } from '../../util';
+import type { SingularPlatform, SingularPlatformMapping } from './types';
-const BASE_URL = 'https://s2s.singular.net/api/v1';
+const BASE_URL = 'https://s2s.singular.net/api';
+const SESSION_ENDPOINT_PATH_V1 = '/v1/launch';
+const EVENT_ENDPOINT_PATH_V1 = '/v1/evt';
+const EVENT_ENDPOINT_PATH_V2 = '/v2/evt';
+
+const PARTNER_OBJECT = { partner: 'rudderstack' };
// Supported events in Singular: SessionNotification, EventNotification
// ref: https://support.singular.net/hc/en-us/articles/360048588672-Server-to-Server-S2S-API-Endpoint-Reference
@@ -34,7 +40,23 @@ const CONFIG_CATEGORIES = {
},
};
-const SUPPORTED_PLATFORM = {
+// V2 event API: mapping configs in data/ (no platform device ids; sdid set in code from integration options)
+const CONFIG_CATEGORIES_V2 = {
+ EVENT_ANDROID: {
+ name: 'v2/SINGULARAndroidEventConfig',
+ type: 'track',
+ },
+ EVENT_IOS: {
+ name: 'v2/SINGULARIosEventConfig',
+ type: 'track',
+ },
+ EVENT_UNITY: {
+ name: 'v2/SINGULARUnityEventConfig',
+ type: 'track',
+ },
+};
+
+const SUPPORTED_PLATFORM: Readonly<Record<SingularPlatform, SingularPlatformMapping>> = {
android: 'ANDROID',
ios: 'IOS',
pc: 'unity',
@@ -44,9 +66,15 @@ const SUPPORTED_PLATFORM = {
metaquest: 'unity',
};
-const SUPPORTED_UNTIY_SUBPLATFORMS = ['pc', 'xbox', 'playstation', 'nintendo', 'metaquest'];
+const SUPPORTED_UNTIY_SUBPLATFORMS: readonly SingularPlatform[] = [
+ 'pc',
+ 'xbox',
+ 'playstation',
+ 'nintendo',
+ 'metaquest',
+];
-const SINGULAR_SESSION_ANDROID_EXCLUSION = [
+const SINGULAR_SESSION_ANDROID_EXCLUSION: readonly string[] = [
'referring_application',
'asid',
'url',
@@ -55,7 +83,7 @@ const SINGULAR_SESSION_ANDROID_EXCLUSION = [
'install',
];
-const SINGULAR_SESSION_IOS_EXCLUSION = [
+const SINGULAR_SESSION_IOS_EXCLUSION: readonly string[] = [
'install_receipt',
'url',
'userAgent',
@@ -67,7 +95,7 @@ const SINGULAR_SESSION_IOS_EXCLUSION = [
'install',
];
-const SINGULAR_EVENT_ANDROID_EXCLUSION = [
+const SINGULAR_EVENT_ANDROID_EXCLUSION: readonly string[] = [
'price',
'quantity',
'currency',
@@ -81,7 +109,7 @@ const SINGULAR_EVENT_ANDROID_EXCLUSION = [
'products',
];
-const SINGULAR_EVENT_IOS_EXCLUSION = [
+const SINGULAR_EVENT_IOS_EXCLUSION: readonly string[] = [
'price',
'quantity',
'currency',
@@ -96,18 +124,34 @@ const SINGULAR_EVENT_IOS_EXCLUSION = [
'products',
];
-const SESSIONEVENTS = ['application installed', 'application updated', 'application opened'];
+const SESSIONEVENTS: readonly string[] = [
+ 'application installed',
+ 'application updated',
+ 'application opened',
+];
+
+/** V2 API: exclude singularDeviceId from event attributes (e) to avoid duplicating sdid query param */
+const SINGULAR_V2_EVENT_ATTRIBUTES_EXCLUDED_KEYS: readonly string[] = ['singularDeviceId'];
const MAPPING_CONFIG = getMappingConfig(CONFIG_CATEGORIES, __dirname);
-module.exports = {
+const MAPPING_CONFIG_V2 = getMappingConfig(CONFIG_CATEGORIES_V2, __dirname);
+
+export {
CONFIG_CATEGORIES,
+ CONFIG_CATEGORIES_V2,
MAPPING_CONFIG,
+ MAPPING_CONFIG_V2,
SESSIONEVENTS,
SINGULAR_SESSION_ANDROID_EXCLUSION,
SINGULAR_SESSION_IOS_EXCLUSION,
SINGULAR_EVENT_ANDROID_EXCLUSION,
SINGULAR_EVENT_IOS_EXCLUSION,
+ SINGULAR_V2_EVENT_ATTRIBUTES_EXCLUDED_KEYS,
SUPPORTED_PLATFORM,
SUPPORTED_UNTIY_SUBPLATFORMS,
BASE_URL,
+ SESSION_ENDPOINT_PATH_V1,
+ EVENT_ENDPOINT_PATH_V1,
+ EVENT_ENDPOINT_PATH_V2,
+ PARTNER_OBJECT,
};
diff --git a/src/v0/destinations/singular/data/v2/SINGULARAndroidEventConfig.json b/src/v0/destinations/singular/data/v2/SINGULARAndroidEventConfig.json
new file mode 100644
index 00000000000..9fe3b3af405
--- /dev/null
+++ b/src/v0/destinations/singular/data/v2/SINGULARAndroidEventConfig.json
@@ -0,0 +1,93 @@
+[
+ {
+ "destKey": "n",
+ "sourceKeys": "event",
+ "required": true
+ },
+ {
+ "destKey": "p",
+ "sourceKeys": "context.os.name",
+ "required": true
+ },
+ {
+ "destKey": "i",
+ "sourceKeys": "context.app.namespace",
+ "required": true
+ },
+ {
+ "destKey": "ip",
+ "sourceKeys": ["context.ip", "request_ip"],
+ "required": true
+ },
+ {
+ "destKey": "ve",
+ "sourceKeys": "context.os.version",
+ "required": true
+ },
+ {
+ "destKey": "custom_user_id",
+ "sourceKeys": "userIdOnly",
+ "sourceFromGenericMap": true,
+ "required": false
+ },
+ {
+ "destKey": "is_revenue_event",
+ "sourceKeys": "properties.is_revenue_event",
+ "required": false
+ },
+ {
+ "destKey": "utime",
+ "sourceKeys": "timestamp",
+ "sourceFromGenericMap": true,
+ "required": false,
+ "metadata": {
+ "type": "secondTimestamp"
+ }
+ },
+ {
+ "destKey": "amt",
+ "sourceKeys": [
+ "properties.total",
+ "properties.value",
+ "properties.revenue",
+ {
+ "operation": "multiplication",
+ "args": [
+ {
+ "sourceKeys": "properties.price"
+ },
+ {
+ "sourceKeys": "properties.quantity",
+ "default": 1
+ }
+ ]
+ }
+ ],
+ "required": false
+ },
+ {
+ "destKey": "cur",
+ "sourceKeys": "properties.currency",
+ "required": false
+ },
+ {
+ "destKey": "purchase_receipt",
+ "sourceKeys": "properties.purchase_receipt",
+ "required": false
+ },
+ {
+ "destKey": "purchase_product_id",
+ "sourceKeys": ["properties.product_id", "properties.sku"],
+ "required": false
+ },
+ {
+ "destKey": "purchase_transaction_id",
+ "sourceKeys": ["properties.orderId", "properties.purchase_transaction_id"],
+ "required": false
+ },
+ {
+ "destKey": "receipt_signature",
+ "sourceKeys": "properties.receipt_signature",
+ "required": false
+ }
+]
diff --git a/src/v0/destinations/singular/data/v2/SINGULARIosEventConfig.json b/src/v0/destinations/singular/data/v2/SINGULARIosEventConfig.json
new file mode 100644
index 00000000000..e035daf70aa
--- /dev/null
+++ b/src/v0/destinations/singular/data/v2/SINGULARIosEventConfig.json
@@ -0,0 +1,114 @@
+[
+ {
+ "destKey": "n",
+ "sourceKeys": "event",
+ "required": true
+ },
+ {
+ "destKey": "p",
+ "sourceKeys": "context.os.name",
+ "required": true
+ },
+ {
+ "destKey": "i",
+ "sourceKeys": "context.app.namespace",
+ "required": true
+ },
+ {
+ "destKey": "ip",
+ "sourceKeys": ["context.ip", "request_ip"],
+ "required": true
+ },
+ {
+ "destKey": "ve",
+ "sourceKeys": "context.os.version",
+ "required": true
+ },
+ {
+ "destKey": "att_authorization_status",
+ "sourceKeys": "context.device.attTrackingStatus",
+ "required": true
+ },
+ {
+ "destKey": "custom_user_id",
+ "sourceKeys": "userIdOnly",
+ "sourceFromGenericMap": true,
+ "required": false
+ },
+ {
+ "destKey": "utime",
+ "sourceKeys": "timestamp",
+ "sourceFromGenericMap": true,
+ "required": false,
+ "metadata": {
+ "type": "secondTimestamp"
+ }
+ },
+ {
+ "destKey": "skan_conversion_value",
+ "sourceKeys": "properties.skan_conversion_value",
+ "required": false
+ },
+ {
+ "destKey": "skan_first_call_timestamp",
+ "sourceKeys": "properties.skan_first_call_timestamp",
+ "required": false,
+ "metadata": {
+ "type": "secondTimestamp"
+ }
+ },
+ {
+ "destKey": "skan_last_call_timestamp",
+ "sourceKeys": "properties.skan_last_call_timestamp",
+ "required": false,
+ "metadata": {
+ "type": "secondTimestamp"
+ }
+ },
+ {
+ "destKey": "is_revenue_event",
+ "sourceKeys": "properties.is_revenue_event",
+ "required": false
+ },
+ {
+ "destKey": "amt",
+ "sourceKeys": [
+ "properties.total",
+ "properties.value",
+ "properties.revenue",
+ {
+ "operation": "multiplication",
+ "args": [
+ {
+ "sourceKeys": "properties.price"
+ },
+ {
+ "sourceKeys": "properties.quantity",
+ "default": 1
+ }
+ ]
+ }
+ ],
+ "required": false
+ },
+ {
+ "destKey": "cur",
+ "sourceKeys": "properties.currency",
+ "required": false
+ },
+ {
+ "destKey": "purchase_receipt",
+ "sourceKeys": "properties.purchase_receipt",
+ "required": false
+ },
+ {
+ "destKey": "purchase_product_id",
+ "sourceKeys": ["properties.product_id", "properties.sku"],
+ "required": false
+ },
+ {
+ "destKey": "purchase_transaction_id",
+ "sourceKeys": ["properties.orderId", "properties.purchase_transaction_id"],
+ "required": false
+ }
+]
diff --git a/src/v0/destinations/singular/data/v2/SINGULARUnityEventConfig.json b/src/v0/destinations/singular/data/v2/SINGULARUnityEventConfig.json
new file mode 100644
index 00000000000..936da9381c0
--- /dev/null
+++ b/src/v0/destinations/singular/data/v2/SINGULARUnityEventConfig.json
@@ -0,0 +1,107 @@
+[
+ {
+ "destKey": "p",
+ "sourceKeys": "context.os.name",
+ "required": true
+ },
+ {
+ "destKey": "i",
+ "sourceKeys": "context.app.namespace",
+ "required": true
+ },
+ {
+ "destKey": "is_revenue_event",
+ "sourceKeys": "properties.is_revenue_event",
+ "required": false
+ },
+ {
+ "destKey": "n",
+ "sourceKeys": "event",
+ "required": true
+ },
+ {
+ "destKey": "av",
+ "sourceKeys": "context.app.version",
+ "required": false
+ },
+ {
+ "destKey": "ve",
+ "sourceKeys": "context.os.version",
+ "required": false
+ },
+ {
+ "destKey": "os",
+ "sourceKeys": "properties.os",
+ "required": true
+ },
+ {
+ "destKey": "ip",
+ "sourceKeys": ["context.ip", "request_ip"],
+ "required": true
+ },
+ {
+ "destKey": "use_ip",
+ "sourceKeys": "properties.use_ip",
+ "required": false
+ },
+ {
+ "destKey": "install_source",
+ "sourceKeys": "properties.install_source",
+ "required": true
+ },
+ {
+ "destKey": "data_sharing_options",
+ "sourceKeys": "properties.data_sharing_options",
+ "required": false
+ },
+ {
+ "destKey": "amt",
+ "sourceKeys": [
+ "properties.total",
+ "properties.value",
+ "properties.revenue",
+ {
+ "operation": "multiplication",
+ "args": [
+ {
+ "sourceKeys": "properties.price"
+ },
+ {
+ "sourceKeys": "properties.quantity",
+ "default": 1
+ }
+ ]
+ }
+ ],
+ "required": false
+ },
+ {
+ "destKey": "cur",
+ "sourceKeys": "properties.currency",
+ "required": false
+ },
+ {
+ "destKey": "ua",
+ "sourceKeys": "context.userAgent",
+ "required": false
+ },
+ {
+ "destKey": "utime",
+ "sourceKeys": "timestamp",
+ "sourceFromGenericMap": true,
+ "required": false,
+ "metadata": {
+ "type": "secondTimestamp"
+ }
+ },
+ {
+ "destKey": "custom_user_id",
+ "sourceKeys": "properties.custom_user_id",
+ "required": false
+ },
+ {
+ "destKey": "install",
+ "sourceKeys": "properties.install",
+ "required": false
+ }
+]
diff --git a/src/v0/destinations/singular/docs/businesslogic.md b/src/v0/destinations/singular/docs/businesslogic.md
new file mode 100644
index 00000000000..d9d46966178
--- /dev/null
+++ b/src/v0/destinations/singular/docs/businesslogic.md
@@ -0,0 +1,319 @@
+# Singular Business Logic and Mappings
+
+## Overview
+
+This document outlines the business logic and mappings used in the Singular destination integration. It covers how RudderStack events are mapped to Singular's S2S API format, the specific API endpoints used for each event type, and the special handling for various scenarios.
+
+## API Endpoints and Request Flow
+
+### SESSION Endpoint (`/api/v1/launch`)
+
+**Documentation**: [Singular SESSION Endpoint API Reference](https://support.singular.net/hc/en-us/articles/31394799175963)
+
+**Purpose**: Track user sessions and enable attribution for app installs, re-engagement, and retention metrics.
+
+**When Used**:
+
+- Default session events: `Application Installed`, `Application Updated`, `Application Opened`
+- Custom session events defined in `sessionEventList` configuration
+
+**Request Flow**:
+
+1. Event received with session event name
+2. Platform detected from `context.os.name`
+3. Platform-specific session payload constructed using mapping config
+4. Additional parameters added (dnt, openuri, connection type)
+5. GET request sent to `https://s2s.singular.net/api/v1/launch`
+
+### EVENT Endpoint V1 (`/api/v1/evt`)
+
+**Documentation**: [Singular EVENT Endpoint API Reference](https://support.singular.net/hc/en-us/articles/31496864868635)
+
+**Purpose**: Track in-app events and revenue for attribution analysis.
+
+**When Used**:
+
+- Non-session track events
+- When `integrations.Singular.singularDeviceId` is NOT present
+
+**Request Flow**:
+
+1. Event received with non-session event name
+2. Platform detected from `context.os.name`
+3. Platform-specific event payload constructed
+4. Custom event attributes extracted and added to `e` parameter
+5. Revenue parameters added if applicable
+6. GET request sent to `https://s2s.singular.net/api/v1/evt`
+
+### EVENT Endpoint V2 (`/api/v2/evt`)
+
+**Documentation**: [Singular EVENT Endpoint API Reference](https://support.singular.net/hc/en-us/articles/31496864868635)
+
+**Purpose**: Simplified event tracking using Singular Device ID (SDID) only.
+
+**When Used**:
+
+- Non-session track events
+- When `integrations.Singular.singularDeviceId` IS present
+
+**Request Flow**:
+
+1. Event received with non-session event name
+2. `singularDeviceId` detected in integrations object
+3. V2 mapping config used (excludes platform device IDs)
+4. `sdid` parameter set from `integrations.Singular.singularDeviceId`
+5. GET request sent to `https://s2s.singular.net/api/v2/evt`
+
+## Event Processing Logic
+
+### Session Event Detection
+
+See `util.ts` lines 130–134 (`isSessionEvent`). Default session event names are defined in `config.ts` (`SESSIONEVENTS`).
+
+**Case Sensitivity**:
+
+- **Default session events** (`Application Installed`, `Application Updated`, `Application Opened`): Matched **case-insensitively**. Any casing works (e.g., `application installed`, `APPLICATION INSTALLED`).
+- **Custom session events** (from `sessionEventList` config): Matched **case-sensitively**. Must match exactly as configured.
+
+### Platform Detection and Normalization
+
+See `util.ts` lines 346–376 (`platformWisePayloadGenerator`). Supported platform mapping is in `config.ts` lines 58–66 (`SUPPORTED_PLATFORM`).
+
+### API Version Selection
+
+See `util.ts` lines 381–396 (`getEndpoint`). V2 is chosen when `integrations.Singular.singularDeviceId` is present; see `shouldUseV2EventApi` at lines 122–123.
+
+## Mapping Configurations
+
+### Android Session Mapping (`SINGULARAndroidSessionConfig.json`)
+
+| RudderStack Field | Singular Parameter | Required | Description |
+| ------------------------------ | ------------------------- | -------- | --------------------- |
+| `context.os.name` | `p` | Yes | Platform (Android) |
+| `context.app.namespace` | `i` | Yes | App package name |
+| `context.app.version` | `app_v` | Yes | App version |
+| `context.ip` / `request_ip` | `ip` | Yes | Device IP |
+| `context.os.version` | `ve` | Yes | OS version |
+| `context.device.model` | `mo` | Yes | Device model |
+| `context.device.manufacturer` | `ma` | Yes | Device manufacturer |
+| `context.locale` | `lc` | Yes | Device locale |
+| `context.app.build` | `bd` | Yes | Build identifier |
+| `properties.install` | `install` | Yes | Install indicator |
+| `event` | `sessionNotificationName` | Yes | Session event name |
+| `context.app.name` | `n` | No | App name |
+| `context.network.carrier` | `cn` | No | Network carrier |
+| `context.device.token` | `fcm` | No | FCM token |
+| `context.device.advertisingId` | `aifa` | No | Google Advertising ID |
+| `context.device.id` | `andi` | No | Android ID |
+| `properties.asid` | `asid` | No | App Set ID |
+| `properties.install_ref` | `install_ref` | No | Install referrer |
+| `userId` | `custom_user_id` | No | Custom user ID |
+| `timestamp` | `utime` | No | Unix timestamp |
+| `timestamp` | `install_time` | Yes | Install timestamp |
+| `timestamp` | `update_time` | Yes | Update timestamp |
+| `context.userAgent` | `ua` | No | User agent |
+
+### Android Event Mapping (`SINGULARAndroidEventConfig.json`)
+
+| RudderStack Field | Singular Parameter | Required | Description |
+| ------------------------------------ | ------------------------- | -------- | ----------------- |
+| `event` | `n` | Yes | Event name |
+| `context.os.name` | `p` | Yes | Platform |
+| `context.app.namespace` | `i` | Yes | App identifier |
+| `context.ip` / `request_ip` | `ip` | Yes | Device IP |
+| `context.os.version` | `ve` | Yes | OS version |
+| `userId` | `custom_user_id` | No | Custom user ID |
+| `properties.is_revenue_event` | `is_revenue_event` | No | Revenue flag |
+| `timestamp` | `utime` | No | Unix timestamp |
+| `properties.total/value/revenue` | `amt` | No | Revenue amount |
+| `properties.currency` | `cur` | No | Currency code |
+| `properties.purchase_receipt` | `purchase_receipt` | No | Receipt |
+| `properties.product_id/sku` | `purchase_product_id` | No | Product ID |
+| `properties.purchase_transaction_id` | `purchase_transaction_id` | No | Transaction ID |
+| `context.device.advertisingId` | `aifa` | No | GAID |
+| `context.device.id` | `andi` | No | Android ID |
+| `properties.asid` | `asid` | No | App Set ID |
+| `properties.receipt_signature` | `receipt_signature` | No | Receipt signature |
+
+### iOS Session Mapping (`SINGULARIosSessionConfig.json`)
+
+| RudderStack Field | Singular Parameter | Required | Description |
+| ---------------------------------- | -------------------------- | -------- | -------------------- |
+| `context.os.name` | `p` | Yes | Platform (iOS) |
+| `context.app.namespace` | `i` | Yes | Bundle ID |
+| `context.app.version` | `app_v` | Yes | App version |
+| `context.ip` / `request_ip` | `ip` | Yes | Device IP |
+| `context.os.version` | `ve` | Yes | OS version |
+| `context.device.model` | `mo` | Yes | Device model |
+| `context.device.manufacturer` | `ma` | Yes | Manufacturer (Apple) |
+| `context.locale` | `lc` | Yes | Device locale |
+| `context.app.build` | `bd` | Yes | Build identifier |
+| `properties.install` | `install` | Yes | Install indicator |
+| `properties.install_receipt` | `install_receipt` | Yes | iOS install receipt |
+| `context.device.attTrackingStatus` | `att_authorization_status` | Yes | ATT status |
+| `event` | `sessionNotificationName` | Yes | Session event name |
+| `context.device.advertisingId` | `idfa` | Yes | IDFA |
+| `context.device.id` | `idfv` | Yes | IDFV |
+| `context.app.name` | `n` | No | App name |
+| `context.network.carrier` | `cn` | No | Network carrier |
+| `context.device.token` | `apns_token` | No | APNs token |
+| `properties.userAgent` | `ua` | No | User agent |
+| `properties.attribution_token` | `attribution_token` | No | ASA attribution |
+| `properties.skan_conversion_value` | `skan_conversion_value` | No | SKAN value |
+| `userId` | `custom_user_id` | No | Custom user ID |
+| `timestamp` | `utime` | No | Unix timestamp |
+| `timestamp` | `install_time` | Yes | Install timestamp |
+| `timestamp` | `update_time` | Yes | Update timestamp |
+
+### iOS Event Mapping (`SINGULARIosEventConfig.json`)
+
+| RudderStack Field | Singular Parameter | Required | Description |
+| ------------------------------------ | -------------------------- | -------- | -------------- |
+| `event` | `n` | Yes | Event name |
+| `context.os.name` | `p` | Yes | Platform |
+| `context.app.namespace` | `i` | Yes | Bundle ID |
+| `context.ip` / `request_ip` | `ip` | Yes | Device IP |
+| `context.os.version` | `ve` | Yes | OS version |
+| `context.device.attTrackingStatus` | `att_authorization_status` | Yes | ATT status |
+| `context.device.advertisingId` | `idfa` | Yes | IDFA |
+| `context.device.id` | `idfv` | Yes | IDFV |
+| `userId` | `custom_user_id` | No | Custom user ID |
+| `properties.is_revenue_event` | `is_revenue_event` | No | Revenue flag |
+| `timestamp` | `utime` | No | Unix timestamp |
+| `properties.skan_conversion_value` | `skan_conversion_value` | No | SKAN value |
+| `properties.total/value/revenue` | `amt` | No | Revenue amount |
+| `properties.currency` | `cur` | No | Currency code |
+| `properties.purchase_receipt` | `purchase_receipt` | No | Receipt |
+| `properties.product_id/sku` | `purchase_product_id` | No | Product ID |
+| `properties.purchase_transaction_id` | `purchase_transaction_id` | No | Transaction ID |
+
+### Unity/PC/Console Event Mapping (`SINGULARUnityEventConfig.json`)
+
+| RudderStack Field | Singular Parameter | Required | Description |
+| -------------------------------- | ------------------ | -------- | ------------------ |
+| `context.os.name` | `p` | Yes | Platform |
+| `context.app.namespace` | `i` | Yes | App identifier |
+| `context.device.id` | `sdid` | No | Singular Device ID |
+| `event` | `n` | Yes | Event name |
+| `context.app.version` | `av` | No | App version |
+| `context.os.version` | `ve` | No | OS version |
+| `properties.os` | `os` | Yes | OS type |
+| `context.ip` / `request_ip` | `ip` | Yes | Device IP |
+| `properties.install_source` | `install_source` | Yes | Install source |
+| `properties.is_revenue_event` | `is_revenue_event` | No | Revenue flag |
+| `timestamp` | `utime` | No | Unix timestamp |
+| `properties.total/value/revenue` | `amt` | No | Revenue amount |
+| `properties.currency` | `cur` | No | Currency code |
+| `context.userAgent` | `ua` | No | User agent |
+| `properties.custom_user_id` | `custom_user_id` | No | Custom user ID |
+
+### V2 Event Mappings
+
+V2 mappings (in `data/v2/`) are similar to V1 but exclude platform-specific device identifiers (IDFA, IDFV, AIFA, etc.) since V2 uses only `sdid` (Singular Device ID).
+
+### Product Mapping (`SINGULAREventProductConfig.json`)
+
+Used for revenue events with `properties.products` array:
+
+| RudderStack Field | Singular Parameter | Required | Description |
+| ------------------------------------ | ------------------------- | -------- | ------------------ |
+| `product_id` / `sku` | `purchase_product_id` | No | Product identifier |
+| `total/value/revenue/price*quantity` | `amt` | No | Amount |
+| `purchase_receipt` | `purchase_receipt` | No | Receipt |
+| `currency` | `cur` | No | Currency |
+| `purchase_transaction_id` | `purchase_transaction_id` | No | Transaction ID |
+
+## Special Handling
+
+### Revenue Events with Products Array
+
+When a non-session event contains `properties.products` array, each product generates a separate revenue event. See `transform.ts` lines 36–40 for the condition and call; see `util.ts` lines 72–97 (`generateRevenuePayloadArray`) for per-product payload construction.
+
+Each product in the array results in:
+
+1. Base payload parameters (platform, IP, timestamp, etc.)
+2. Product-specific revenue parameters (amt, cur, purchase_product_id)
+3. `is_revenue_event: true` flag
+4. Partner identification (`partner: 'rudderstack'`)
+
+### Do Not Track (DNT) Handling
+
+For session events on Android/iOS platforms, `context.device.adTrackingEnabled === true` implies `dnt = 0` (tracking allowed); otherwise `dnt = 1`. See `util.ts` lines 259–270 (`createSessionPayload`).
+
+### Connection Type Detection
+
+See `util.ts` lines 238–239 (`getConnectionType`).
+
+### Match ID for Unity Platforms
+
+For PC/Xbox/PlayStation/Nintendo/MetaQuest platforms, `match_id` is taken from `context.device.advertisingId` when `Config.match_id === 'advertisingId'`, otherwise from `properties.match_id`. See `util.ts` lines 220–231 (`getMatchObject`).
+
+### Data Sharing Options
+
+Privacy consent is read from `integrations.Singular.limitDataSharing` and passed as `data_sharing_options`. See `util.ts` lines 141–150 (`getDataSharingOptionsFromMessage`).
+
+### Custom Event Attributes
+
+Non-mapped properties are extracted and sent as custom event attributes in the `e` parameter. See `util.ts` lines 55–61 (`extractExtraFields`). For V2 API, `singularDeviceId` is excluded from attributes; see `config.ts` (`SINGULAR_V2_EVENT_ATTRIBUTES_EXCLUDED_KEYS`) and `util.ts` lines 321–324.
+
+## Revenue Amount Calculation
+
+The revenue amount (`amt`) is calculated from multiple sources with fallback. See `data/SINGULARAndroidEventConfig.json` (and equivalent iOS/Unity configs) for the `amt` mapping.
+
+Priority:
+
+1. `properties.total`
+2. `properties.value`
+3. `properties.revenue`
+4. `properties.price * properties.quantity` (quantity defaults to 1)
+
+## Timestamp Handling
+
+Timestamps are converted to Unix epoch seconds via the `utime` mapping with `type: "secondTimestamp"`. See the platform event/session configs in `data/` (e.g. `SINGULARAndroidEventConfig.json`).
+
+## Partner Identification
+
+All requests include RudderStack partner identification (`partner: 'rudderstack'`). See `config.ts` line 9 (`PARTNER_OBJECT`) and `util.ts` (e.g. lines 84–86, 296–298) where it is merged into payloads.
+
+## Error Handling
+
+### Validation Errors
+
+| Error | Condition |
+| --------------------------------------------------------------------- | ----------------------------- |
+| `InstrumentationError: Event type is required` | Missing `message.type` |
+| `InstrumentationError: Event type {type} is not supported` | `message.type` is not `track` |
+| `InstrumentationError: Event name is not present for the event` | Missing `message.event` |
+| `InstrumentationError: Platform name is missing from context.os.name` | Missing or invalid OS name |
+| `InstrumentationError: Platform {platform} is not supported` | Unsupported platform |
+| `TransformationError: Failed to Create {platform} {type} Payload` | Payload construction failed |
+
+## Request Format
+
+All requests are sent as HTTP GET with URL query parameters. See `transform.ts` lines 42–51 (single-event response) and `util.ts` lines 90–96 (batch response for products array).
+
+## Summary
+
+The Singular destination transforms RudderStack track events into Singular S2S API requests:
+
+1. **Event Classification**: Session vs non-session based on event name
+2. **Platform Detection**: Android, iOS, or Unity (PC/Console)
+3. **API Selection**: V1 launch (session), V1 evt (event), or V2 evt (SDID-based)
+4. **Payload Construction**: Platform-specific mapping configurations
+5. **Special Handling**: Revenue events, products array, DNT, connection type
+6. **Request Delivery**: HTTP GET with query parameters
+
+Key differentiators by platform and API version:
+
+**V1 API** (default, platform device IDs):
+
+- **Android**: Uses AIFA, ASID, ANDI identifiers
+- **iOS**: Uses IDFA, IDFV, ATT status, SKAdNetwork parameters
+- **Unity/PC/Console**: Uses `match_id` (from `context.device.advertisingId` or `properties.match_id`)
+
+**V2 API** (when `integrations.Singular.singularDeviceId` is provided):
+
+- **All platforms**: Uses `sdid` (Singular Device ID) instead of platform-specific device IDs
+- **Unity/PC/Console**: Also includes `match_id` in addition to `sdid`
+
+[PC & Console Game Integration Guide](https://support.singular.net/hc/en-us/articles/19147380039579-PC-Console-Game-Integration-Guide)
diff --git a/src/v0/destinations/singular/docs/retl.md b/src/v0/destinations/singular/docs/retl.md
new file mode 100644
index 00000000000..4ddb1ffb2f2
--- /dev/null
+++ b/src/v0/destinations/singular/docs/retl.md
@@ -0,0 +1,151 @@
+# Singular RETL Functionality
+
+## Is RETL Supported?
+
+**RETL (Reverse ETL) Support**: **Yes**
+
+The Singular destination supports RETL functionality. Evidence:
+
+- `supportedSourceTypes` includes `warehouse` in `db-config.json`
+- JSON mapper is supported by default (no `disableJsonMapper: true` in config)
+- Does NOT have `supportsVisualMapper: true`, so VDM V1 is not supported
+- Standard event processing applies to warehouse-sourced events
+
+## RETL Support Analysis
+
+### Which type of RETL support does it have?
+
+- **JSON Mapper**: Supported (default, no `disableJsonMapper: true`)
+- **VDM V1**: Not Supported (no `supportsVisualMapper: true` in `db-config.json`)
+- **VDM V2**: Not Supported (no `record` in `supportedMessageTypes`)
+
+### Does it have VDM support?
+
+**No** - `supportsVisualMapper` is not present in `db-config.json`.
+
+### Does it have VDM V2 support?
+
+**No** - Missing both:
+
+- `supportedMessageTypes > record` in `db-config.json`
+- Record event type handling in transformer code
+
+### Connection Configuration
+
+Standard Singular configuration applies:
+
+- **API Key**: Singular SDK Key (required)
+- **API Secret**: Optional secret key
+- **Session Event List**: Custom session events to track
+- **Match ID**: Unity platform identifier mapping
+
+## RETL Flow Implementation
+
+### Warehouse Integration
+
+Singular supports RETL through warehouse sources with JSON mapper functionality:
+
+- **Supported**: Yes, warehouse sources can send data to Singular via RETL
+- **Connection Mode**: Cloud mode only
+- **Message Types**: Track events only
+- **Data Flow**: Warehouse/Database → RudderStack → Singular (via S2S REST API)
+- **Mapping**: JSON mapper transforms warehouse data to Singular format
+
+### Supported Message Types for RETL
+
+```json
+"supportedMessageTypes": {
+ "cloud": ["track"],
+ "warehouse": ["track"]
+}
+```
+
+### RETL Event Processing
+
+The Singular destination processes RETL events the same as standard cloud events. There is no special `mappedToDestination` handling in the Singular implementation.
+
+#### Key RETL Behaviors
+
+1. **Standard Processing**:
+
+ - RETL events are processed through the same transformation pipeline as regular events
+ - No special flags or overrides are implemented
+
+2. **Event Type Support**:
+
+ - Only `track` events are supported for RETL
+ - Events must include all required fields for the target platform
+
+3. **Session vs Event Determination**:
+ - The `sessionEventList` configuration determines which events are treated as session events
+ - Session events are sent to `/api/v1/launch`
+ - Non-session events are sent to `/api/v1/evt` or `/api/v2/evt`
+
+## Data Flow
+
+### RETL Data Processing
+
+1. **Data Extraction**: Warehouse/database data extracted by RudderStack
+2. **Mapping**: Data transformed using JSON mapper configuration
+3. **Event Construction**: Warehouse records converted to track events
+4. **Standard Processing**: Events processed through Singular transformer
+5. **API Delivery**: Events sent to Singular via S2S REST API
+
+### Example RETL Event
+
+```json
+{
+ "type": "track",
+ "event": "Purchase Completed",
+ "userId": "user123",
+ "properties": {
+ "revenue": 99.99,
+ "currency": "USD",
+ "product_id": "prod456"
+ },
+ "context": {
+ "os": {
+ "name": "Android",
+ "version": "12"
+ },
+ "app": {
+ "namespace": "com.example.app",
+ "version": "1.0.0",
+ "build": "100"
+ },
+ "device": {
+ "advertisingId": "8ecd7512-2864-440c-93f3-a3cabe62525b",
+ "id": "fc8d449516de0dfb"
+ },
+ "ip": "192.168.1.1",
+ "locale": "en-US"
+ }
+}
+```
+
+## Summary
+
+The Singular destination supports RETL functionality through:
+
+- **RETL Support**: Yes, via warehouse source type support
+- **JSON Mapper**: Supported by default for data transformation
+- **VDM V1**: Not Supported
+- **VDM V2**: Not Supported
+- **Supported Events**: Track events only
+- **API Integration**: Singular S2S REST API for data delivery
+- **Special RETL Logic**: None - standard event processing
+
+**Key Features**:
+
+- Standard track event processing for warehouse data
+- Platform-specific payload generation (Android, iOS, Unity)
+- Session vs Event endpoint routing based on configuration
+- Revenue tracking support with products array multiplexing
+
+**Limitations**:
+
+- No VDM V1/V2 support
+- Track events only (no identify, group, etc.)
+- Cloud mode only for RETL functionality
+- No special `mappedToDestination` handling
+- Requires all platform-specific required fields
diff --git a/src/v0/destinations/singular/transform.js b/src/v0/destinations/singular/transform.js
deleted file mode 100644
index ed6757c47ba..00000000000
--- a/src/v0/destinations/singular/transform.js
+++ /dev/null
@@ -1,69 +0,0 @@
-const { InstrumentationError } = require('@rudderstack/integrations-lib');
-const { BASE_URL } = require('./config');
-const {
- defaultRequestConfig,
- defaultGetRequestConfig,
- simpleProcessRouterDest,
-} = require('../../util');
-
-const {
- platformWisePayloadGenerator,
- generateRevenuePayloadArray,
- isSessionEvent,
-} = require('./util');
-
-const responseBuilderSimple = (message, { Config }) => {
- const eventName = message.event;
-
- if (!eventName) {
- throw new InstrumentationError('Event name is not present for the event');
- }
-
- const sessionEvent = isSessionEvent(Config, eventName);
- const { eventAttributes, payload } = platformWisePayloadGenerator(message, sessionEvent, Config);
- const endpoint = sessionEvent ? `${BASE_URL}/launch` : `${BASE_URL}/evt`;
-
- // If we have an event where we have an array of Products, example Order Completed
- // We will convert the event to revenue events
- if (!sessionEvent && Array.isArray(message?.properties?.products)) {
- return generateRevenuePayloadArray(
- message.properties.products,
- payload,
- Config,
- eventAttributes,
- );
- }
-
- const response = {
- ...defaultRequestConfig(),
- endpoint,
- params: { ...payload, a: Config.apiKey },
- method: defaultGetRequestConfig.requestMethod,
- };
- if (eventAttributes) {
- response.params = { ...response.params, e: eventAttributes };
- }
- return response;
-};
-
-const processEvent = (message, destination) => {
- if (!message.type) {
- throw new InstrumentationError('Event type is required');
- }
- const messageType = message.type.toLowerCase();
-
- if (messageType === 'track') {
- return responseBuilderSimple(message, destination);
- }
-
- throw new InstrumentationError(`Event type ${messageType} is not supported`);
-};
-
-const process = (event) => processEvent(event.message, event.destination);
-
-const processRouterDest = async (inputs, reqMetadata) => {
- const respList = await simpleProcessRouterDest(inputs, process, reqMetadata);
- return respList;
-};
-
-module.exports = { process, processRouterDest };
diff --git a/src/v0/destinations/singular/transform.ts b/src/v0/destinations/singular/transform.ts
new file mode 100644
index 00000000000..9f7ef1812b1
--- /dev/null
+++ b/src/v0/destinations/singular/transform.ts
@@ -0,0 +1,83 @@
+import { InstrumentationError } from '@rudderstack/integrations-lib';
+import { defaultRequestConfig, defaultGetRequestConfig, simpleProcessRouterDest } from '../../util';
+
+import { PARTNER_OBJECT } from './config';
+import type {
+ SingularMessage,
+ SingularDestination,
+ SingularRouterRequest,
+ SingularBatchRequest,
+ SingularProcessorRequest,
+} from './types';
+
+import {
+ platformWisePayloadGenerator,
+ generateRevenuePayloadArray,
+ isSessionEvent,
+ getEndpoint,
+} from './util';
+
+const responseBuilderSimple = (
+ message: SingularMessage,
+ { Config }: SingularDestination,
+): SingularBatchRequest | SingularBatchRequest[] => {
+ const eventName = message.event;
+
+ if (!eventName) {
+ throw new InstrumentationError('Event name is not present for the event');
+ }
+
+ const sessionEvent = isSessionEvent(Config, eventName);
+ const payload = platformWisePayloadGenerator(message, sessionEvent, Config);
+ const { endpoint, endpointPath } = getEndpoint(message, sessionEvent);
+
+ // If we have an event where we have an array of Products, example Order Completed
+ // We will convert the event to revenue events
+ if (!sessionEvent && Array.isArray(message?.properties?.products)) {
+ return generateRevenuePayloadArray(message.properties.products, payload, Config, {
+ endpoint,
+ endpointPath,
+ });
+ }
+
+ // Build params with API key
+ const params = { ...payload, a: Config.apiKey, ...PARTNER_OBJECT };
+
+ const response: SingularBatchRequest = {
+ ...defaultRequestConfig(),
+ endpoint,
+ endpointPath,
+ params,
+ method: defaultGetRequestConfig.requestMethod,
+ };
+
+ return response;
+};
+
+const processEvent = (
+ message: SingularMessage,
+ destination: SingularDestination,
+): SingularBatchRequest | SingularBatchRequest[] => {
+ if (!message.type) {
+ throw new InstrumentationError('Event type is required');
+ }
+ const messageType = message.type.toLowerCase();
+
+ if (messageType === 'track') {
+ return responseBuilderSimple(message, destination);
+ }
+
+ throw new InstrumentationError(`Event type ${messageType} is not supported`);
+};
+
+const process = (event: SingularProcessorRequest) => processEvent(event.message, event.destination);
+
+const processRouterDest = async (
+ inputs: SingularRouterRequest[],
+  reqMetadata: Record<string, unknown>,
+) => {
+ const respList = await simpleProcessRouterDest(inputs, process, reqMetadata, {});
+ return respList;
+};
+
+export { process, processRouterDest };
diff --git a/src/v0/destinations/singular/types.ts b/src/v0/destinations/singular/types.ts
new file mode 100644
index 00000000000..641cf4c127b
--- /dev/null
+++ b/src/v0/destinations/singular/types.ts
@@ -0,0 +1,449 @@
+import {
+ Destination,
+ Metadata,
+ RouterTransformationRequestData,
+ RudderMessage,
+} from '../../../types';
+import {
+ BatchedRequest,
+ ProcessorTransformationRequest,
+} from '../../../types/destinationTransformation';
+
+/**
+ * Custom session event configuration from destination settings
+ * Used to determine if an event should be treated as a session event
+ */
+interface SingularSessionEvent {
+ sessionEventName: string;
+}
+
+/**
+ * Singular destination configuration
+ */
+export interface SingularDestinationConfig {
+ /**
+ * Singular SDK Key for API authentication (required)
+ */
+ apiKey: string;
+
+ /**
+ * List of custom session events
+ * Combined with default session events: Application Installed, Application Updated, Application Opened
+ */
+ sessionEventList?: SingularSessionEvent[];
+
+ /**
+ * Match ID source for Unity platforms
+ * Values: 'advertisingId' or custom identifier
+ */
+ match_id?: 'advertisingId' | string;
+}
+
+/**
+ * Product object for e-commerce revenue events
+ */
+export interface SingularProduct {
+ product_id?: string;
+ sku?: string;
+ name?: string;
+ price?: number;
+ quantity?: number;
+ category?: string;
+ url?: string;
+ image_url?: string;
+ currency?: string;
+ purchase_receipt?: string;
+ purchase_transaction_id?: string;
+ receipt_signature?: string;
+}
+
+/**
+ * RudderStack message extended with Singular-specific properties
+ */
+export interface SingularMessage extends RudderMessage {
+ properties?: {
+ // E-commerce revenue fields
+ products?: SingularProduct[];
+ currency?: string;
+ price?: number;
+ quantity?: number;
+ revenue?: number;
+ total?: number;
+ value?: number;
+
+ // Session event fields
+ url?: string;
+ referring_application?: string;
+ install_receipt?: string;
+ asid?: string;
+ install_ref?: string;
+ build?: string;
+ install?: string;
+
+ // Event revenue validation fields
+ purchase_receipt?: string;
+ product_id?: string;
+ sku?: string;
+ purchase_transaction_id?: string;
+ receipt_signature?: string;
+
+ // iOS-specific attribution fields
+ userAgent?: string;
+ attribution_token?: string;
+ skan_conversion_value?: string;
+ skan_first_call_timestamp?: string;
+ skan_last_call_timestamp?: string;
+
+ // Unity platforms
+ match_id?: string;
+ };
+ context?: RudderMessage['context'] & {
+ os?: {
+ name?: string;
+ version?: string;
+ };
+ device?: {
+ advertisingId?: string;
+ adTrackingEnabled?: boolean;
+ };
+ network?: {
+ wifi?: boolean;
+ carrier?: string;
+ };
+ };
+}
+
+/**
+ * Common parameters shared by both SESSION and EVENT endpoints
+ *
+ * Based on Singular S2S API documentation:
+ * - EVENT: https://support.singular.net/hc/en-us/articles/31496864868635
+ * - SESSION: https://support.singular.net/hc/en-us/articles/31394799175963
+ */
+interface SingularCommonParams {
+ // ==================== API Authentication (Required) ====================
+ /** Singular SDK Key for API authentication */
+ a: string;
+
+ // ==================== Platform (Required) ====================
+ /** Platform of the application. Allowed: Android, iOS, Web, PC, Xbox, Playstation, Nintendo, MetaQuest, CTV */
+ p: 'Android' | 'iOS' | 'Web' | 'PC' | 'Xbox' | 'Playstation' | 'Nintendo' | 'MetaQuest' | 'CTV';
+
+ // ==================== Device Identifiers (Platform-specific, at least one required) ====================
+ /** iOS Identifier for Advertisers (IDFA). Required for iOS. Omit if unavailable (user denied ATT). */
+ idfa?: string;
+
+ /** iOS Identifier for Vendors (IDFV). Always required for iOS regardless of ATT status. */
+ idfv?: string;
+
+ /** Android Google Advertising ID (GAID). Required on Google Play devices. */
+ aifa?: string;
+
+ /** Android App Set ID. Always required on Google Play devices. */
+ asid?: string;
+
+ /** Amazon Advertising ID. Required for Amazon Fire devices. */
+ amid?: string;
+
+ /** Open Advertising Identifier (OAID). Required for Chinese OEM devices (Huawei, Xiaomi, OPPO, Vivo). */
+ oaid?: string;
+
+ /** Android ID. Restricted use - only for non-Google Play devices. */
+ andi?: string;
+
+ /** Singular Device ID. Required for Web, PC, Console, CTV platforms. */
+ sdid?: string;
+
+ /** Enterprise-only client-defined identifier. Requires special enablement. */
+ sing?: string;
+
+ // ==================== Device Parameters (Required) ====================
+ /** Public IPv4 IP address of device. IPv6 supported but IPv4 recommended. */
+ ip?: string;
+
+ /** OS version of device at event/session time */
+ ve: string;
+
+ // ==================== Application Parameters (Required) ====================
+ /** App identifier (case-sensitive). Android: Package Name, iOS: Bundle ID, PC/Console: Your identifier */
+ i: string;
+
+ // ==================== iOS-specific Parameters ====================
+ /**
+ * App Tracking Transparency (ATT) status code (iOS 14.5+).
+ * 0=Undetermined, 1=Restricted, 2=Denied, 3=Authorized.
+ * Always required for iOS even if ATT not implemented.
+ */
+ att_authorization_status?: 0 | 1 | 2 | 3;
+
+ // ==================== Optional Device Parameters ====================
+ /** Device make (manufacturer name). Must be used with mo (model). */
+ ma?: string;
+
+ /** Device model. Must be used with ma (make). */
+ mo?: string;
+
+ /** IETF locale tag (e.g., en_US) */
+ lc?: string;
+
+ /** Device build identifier, URL-encoded */
+ bd?: string;
+
+ // ==================== Timestamp Parameters ====================
+ /** 10-digit Unix timestamp */
+ utime?: number;
+
+ /** 13-digit Unix timestamp with milliseconds */
+ umilisec?: number;
+
+ // ==================== Network Parameters ====================
+ /** Instructs Singular to extract IP from HTTP request instead of ip parameter */
+ use_ip?: boolean;
+
+ /** ISO 3166-1 alpha-2 two-letter country code. Required when IP not available or use_ip=true */
+ country?: string;
+
+ // ==================== Data Privacy ====================
+ /** End-user consent for data sharing (object passed directly; serialization handled by other services) */
+ data_sharing_options?: { limit_data_sharing: boolean };
+
+ // ==================== Cross-Device Support ====================
+ /** Your internal user ID for cross-device tracking */
+ custom_user_id?: string;
+
+ // ==================== SKAdNetwork Support (iOS) ====================
+ /** Latest SKAdNetwork conversion value */
+ skan_conversion_value?: number;
+
+ /** Unix timestamp of first call to SKAdNetwork API */
+ skan_first_call_timestamp?: number;
+
+ /** Unix timestamp of most recent call to SKAdNetwork API */
+ skan_last_call_timestamp?: number;
+
+ // ==================== Custom Properties ====================
+ /** JSON URL-encoded object with custom key-value pairs. Max 5 pairs, 200 chars per key/value */
+ global_properties?: string;
+
+ // ==================== Network Connection ====================
+ /** Network connection type: wifi or carrier */
+ c?: 'wifi' | 'carrier';
+
+ // ==================== Unity Platforms ====================
+ /** Match ID for Unity platforms */
+ match_id?: string;
+}
+
+/**
+ * SESSION Endpoint Request Parameters
+ * Ref: https://support.singular.net/hc/en-us/articles/31394799175963
+ * Endpoint: GET https://s2s.singular.net/api/v1/launch
+ */
+export interface SingularSessionParams extends SingularCommonParams {
+ // ==================== Application Parameters (Required) ====================
+ /** Application version */
+ app_v: string;
+
+ /** Indicates if session represents first session after install or reinstall */
+ install: boolean | 'true' | 'false';
+
+ /** Unix timestamp of first app install */
+ install_time?: number;
+
+ /** Unix timestamp of last app update */
+ update_time?: number;
+
+ // ==================== Fraud Prevention Parameters ====================
+ /** Install source package name or store identifier (Android only) */
+ install_source?: string;
+
+ /** Base64-encoded iOS install receipt for fraud validation */
+ install_receipt?: string;
+
+ // ==================== Deep Linking Parameters ====================
+ /** URL-encoded deep link, Universal Link, or App Link that opened the app */
+ openuri?: string;
+
+ /** Indicates if app expects deferred deep link URL in response */
+ ddl_enabled?: boolean | 'true' | 'false';
+
+ /** Requests resolution of Singular short link to long link. Use with openuri */
+ singular_link_resolve_required?: boolean | 'true' | 'false';
+
+ // ==================== Advanced Attribution Parameters ====================
+ /** JSON URL-encoded Google Install Referrer information (Android Google Play) */
+ install_ref?: string;
+
+ /** JSON URL-encoded Meta Install Referrer (Android Google Play). Not recommended if AMM enabled */
+ meta_ref?: string;
+
+ /** Apple Search Ads attribution token from AdServices framework (iOS 14.3+) */
+ attribution_token?: string;
+
+ // ==================== Network Parameters ====================
+ /** URL-encoded User Agent string */
+ ua?: string;
+
+ /** Carrier name of internet provider */
+ cn?: string;
+
+ // ==================== Uninstall Tracking Support ====================
+ /** Hex-encoded Apple Push Notification Service (APNs) device token (iOS) */
+ apns_token?: string;
+
+ /** Firebase Cloud Messaging device token (Android) */
+ fcm?: string;
+
+ // ==================== Data Privacy ====================
+ /** Do Not Track status. 1=enabled (tracking disabled), 0=disabled (tracking enabled) */
+ dnt?: 0 | 1;
+
+ /** Indicates if Do Not Track is OFF. 0=DNT enabled, 1=DNT disabled */
+ dntoff?: 0 | 1;
+
+ // ==================== Google Ads ICM Support (Beta) ====================
+ /** Required for Google Ads Integrated Conversion Measurement (iOS) */
+ odm_info?: string;
+
+ /** Required for Google Ads Integrated Conversion Measurement (iOS) */
+ odm_error?: string;
+
+ // ==================== Legacy Parameters (used in current implementation) ====================
+ /** App name */
+ n?: string;
+
+ /** Session notification name */
+ sessionNotificationName?: string;
+}
+
+/**
+ * EVENT Endpoint Request Parameters
+ * Ref: https://support.singular.net/hc/en-us/articles/31496864868635
+ * Endpoint: GET https://s2s.singular.net/api/v1/evt
+ */
+export interface SingularEventParams extends SingularCommonParams {
+ // ==================== Event Parameters (Required) ====================
+ /** Name of the event being tracked. Maximum 32 ASCII characters */
+ n: string;
+
+ // ==================== Event Attributes ====================
+ /** JSON URL-encoded string specifying custom event attributes. Max 500 ASCII chars per key/value */
+  e?: string | Record<string, unknown>;
+
+ // ==================== Revenue Tracking ====================
+ /** Specifies whether event is revenue event. Can be omitted if event name is __iap__ or non-zero amt provided */
+ is_revenue_event?: boolean | 'true' | 'false';
+
+ /** Currency amount of transaction. Use with cur parameter */
+ amt?: number;
+
+ /** ISO 4217 three-letter uppercase currency code. Use with amt parameter */
+ cur?: string;
+
+ // ==================== Revenue Validation Parameters ====================
+ /** Receipt received from purchase transaction (iOS: StoreKit receipt, Android: Google Play Purchase object) */
+ purchase_receipt?: string;
+
+ /** Signature used to sign purchase receipt (Android only) */
+ receipt_signature?: string;
+
+ /** Product SKU identifier */
+ purchase_product_id?: string;
+
+ /** Transaction identifier */
+ purchase_transaction_id?: string;
+
+ // ==================== Legacy Parameters (used in current implementation) ====================
+ /** Product Name/ID */
+ pn?: string;
+
+ /** Product SKU */
+ psku?: string;
+
+ /** Price */
+ prc?: number;
+
+ /** Quantity */
+ q?: number;
+}
+
+/**
+ * Union type representing Singular S2S API request parameters
+ *
+ * - SingularSessionParams: GET /api/v1/launch (SESSION events)
+ * - SingularEventParams: GET /api/v1/evt (EVENT events)
+ */
+export type SingularRequestParams = SingularSessionParams | SingularEventParams;
+
+/**
+ * Payload structure for Singular transformation
+ */
+export interface SingularPayload {
+ payload: SingularRequestParams;
+  eventAttributes?: Record<string, unknown>;
+}
+
+/**
+ * Supported platform types (lowercase)
+ */
+export type SingularPlatform =
+ | 'android'
+ | 'ios'
+ | 'pc'
+ | 'xbox'
+ | 'playstation'
+ | 'nintendo'
+ | 'metaquest';
+
+/**
+ * Platform mapping categories
+ * - ANDROID: Android devices
+ * - IOS: iOS/iPadOS/watchOS/tvOS devices
+ * - unity: Unity platforms (PC, Xbox, PlayStation, Nintendo, MetaQuest)
+ */
+export type SingularPlatformMapping = 'ANDROID' | 'IOS' | 'unity';
+
+/**
+ * Event type enumeration
+ * - SESSION: /api/v1/launch endpoint
+ * - EVENT: /api/v1/evt endpoint
+ */
+export type SingularEventType = 'SESSION' | 'EVENT';
+
+export type SingularEndpointObject = {
+ endpoint: string;
+ endpointPath: string;
+};
+
+/**
+ * Destination type with Singular configuration
+ */
+export type SingularDestination = Destination<SingularDestinationConfig>;
+
+/**
+ * Batch request for Singular transformation
+ * GET request with query parameters
+ */
+export type SingularBatchRequest = BatchedRequest<
+ Record,
+  Record<string, unknown>,
+  Record<string, unknown>,
+>;
+
+/**
+ * Processor transformation request
+ */
+export type SingularProcessorRequest = ProcessorTransformationRequest<
+ SingularMessage,
+ Metadata,
+ SingularDestination
+>;
+
+/**
+ * Router transformation request
+ */
+export type SingularRouterRequest = RouterTransformationRequestData<
+ SingularMessage,
+ Metadata,
+ SingularDestination
+>;
diff --git a/src/v0/destinations/singular/util.js b/src/v0/destinations/singular/util.js
deleted file mode 100644
index 61db0472ab4..00000000000
--- a/src/v0/destinations/singular/util.js
+++ /dev/null
@@ -1,173 +0,0 @@
-const lodash = require('lodash');
-const { TransformationError, InstrumentationError } = require('@rudderstack/integrations-lib');
-const {
- CONFIG_CATEGORIES,
- MAPPING_CONFIG,
- SINGULAR_SESSION_ANDROID_EXCLUSION,
- SINGULAR_SESSION_IOS_EXCLUSION,
- SINGULAR_EVENT_ANDROID_EXCLUSION,
- SINGULAR_EVENT_IOS_EXCLUSION,
- BASE_URL,
- SUPPORTED_PLATFORM,
- SUPPORTED_UNTIY_SUBPLATFORMS,
- SESSIONEVENTS,
-} = require('./config');
-const {
- constructPayload,
- defaultRequestConfig,
- defaultGetRequestConfig,
- removeUndefinedAndNullValues,
- extractCustomFields,
- getValueFromMessage,
- isDefinedAndNotNull,
- isAppleFamily,
-} = require('../../util');
-
-/*
- All the fields listed inside properties which are not directly mapped, will be sent to 'e' as custom event attributes
-*/
-const extractExtraFields = (message, EXCLUSION_FIELDS) => {
- const eventAttributes = {};
- extractCustomFields(message, eventAttributes, ['properties'], EXCLUSION_FIELDS);
- return eventAttributes;
-};
-
-/**
- * This function is used to generate the array of individual response for each of the products.
- * @param {*} products contains different products
- * @param {*} payload contains the common payload for each revenue event
- * @param {*} Config destination config
- * @param {*} eventAttributes custom attributes
- * @returns list of revenue event responses
- */
-const generateRevenuePayloadArray = (products, payload, Config, eventAttributes) => {
- const responseArray = [];
- products.forEach((product) => {
- const productDetails = constructPayload(
- product,
- MAPPING_CONFIG[CONFIG_CATEGORIES.PRODUCT_PROPERTY.name],
- );
- let finalpayload = { ...payload, ...productDetails };
- // is_revenue_event will be true as here payload for a REVENUE event is being generated
- finalpayload.is_revenue_event = true;
- finalpayload = removeUndefinedAndNullValues(finalpayload);
- const response = defaultRequestConfig();
- response.endpoint = `${BASE_URL}/evt`;
- response.params = { ...finalpayload, a: Config.apiKey };
- if (eventAttributes) {
- response.params = { ...response.params, e: eventAttributes };
- }
- response.method = defaultGetRequestConfig.requestMethod;
- responseArray.push(response);
- });
- return responseArray;
-};
-
-const exclusionList = {
- ANDROID_SESSION_EXCLUSION_LIST: SINGULAR_SESSION_ANDROID_EXCLUSION,
- IOS_SESSION_EXCLUSION_LIST: SINGULAR_SESSION_IOS_EXCLUSION,
- ANDROID_EVENT_EXCLUSION_LIST: SINGULAR_EVENT_ANDROID_EXCLUSION,
- IOS_EVENT_EXCLUSION_LIST: SINGULAR_EVENT_IOS_EXCLUSION,
-};
-
-/**
- * Determines if the event is a session event or not
- * @param {*} Config
- * @param {*} eventName
- */
-const isSessionEvent = (Config, eventName) => {
- const mappedSessionEvents = lodash.map(Config.sessionEventList, 'sessionEventName');
- return mappedSessionEvents.includes(eventName) || SESSIONEVENTS.includes(eventName.toLowerCase());
-};
-
-/**
- * Based on platform of device this function generates payload for singular API
- * @param {*} message
- * @param {*} sessionEvent
- * @returns
- */
-const platformWisePayloadGenerator = (message, sessionEvent, Config) => {
- let eventAttributes;
- const clonedMessage = { ...message };
- let platform = getValueFromMessage(clonedMessage, 'context.os.name');
- const typeOfEvent = sessionEvent ? 'SESSION' : 'EVENT';
- if (!platform) {
- throw new InstrumentationError('Platform name is missing from context.os.name');
- }
- // checking if the os is one of ios, ipados, watchos, tvos
- if (typeof platform === 'string' && isAppleFamily(platform.toLowerCase())) {
- clonedMessage.context.os.name = 'iOS';
- platform = 'iOS';
- }
- platform = platform.toLowerCase();
- if (!SUPPORTED_PLATFORM[platform] && !SUPPORTED_UNTIY_SUBPLATFORMS[platform]) {
- throw new InstrumentationError(`Platform ${platform} is not supported`);
- }
- let payload;
- if (SUPPORTED_UNTIY_SUBPLATFORMS.includes(platform)) {
- payload = constructPayload(
- clonedMessage,
- MAPPING_CONFIG[CONFIG_CATEGORIES[`${typeOfEvent}_UNITY`].name],
- );
- } else {
- payload = constructPayload(
- clonedMessage,
- MAPPING_CONFIG[CONFIG_CATEGORIES[`${typeOfEvent}_${SUPPORTED_PLATFORM[platform]}`].name],
- );
- }
-
- if (!payload) {
- throw new TransformationError(`Failed to Create ${platform} ${typeOfEvent} Payload`);
- }
- if (!SUPPORTED_UNTIY_SUBPLATFORMS.includes(platform)) {
- if (sessionEvent) {
- // context.device.adTrackingEnabled = true implies Singular's do not track (dnt)
- // to be 0 and vice-versa.
- const adTrackingEnabled = getValueFromMessage(
- clonedMessage,
- 'context.device.adTrackingEnabled',
- );
- if (adTrackingEnabled === true) {
- payload.dnt = 0;
- } else {
- payload.dnt = 1;
- }
- // by default, the value of openuri and install_source should be "", i.e empty string if nothing is passed
- payload.openuri = clonedMessage.properties.url || '';
- if (platform === 'android' || platform === 'Android') {
- payload.install_source = clonedMessage.properties.referring_application || '';
- }
- } else {
- // Custom Attribues is not supported by session events
- eventAttributes = extractExtraFields(
- clonedMessage,
- exclusionList[`${SUPPORTED_PLATFORM[platform]}_${typeOfEvent}_EXCLUSION_LIST`],
- );
- eventAttributes = removeUndefinedAndNullValues(eventAttributes);
-
- // If anyone out of value, revenue, total is set,we will have amt in payload
- // and we will consider the event as revenue event.
- if (!isDefinedAndNotNull(payload.is_revenue_event) && payload.amt) {
- payload.is_revenue_event = true;
- }
- }
-
- // Singular maps Connection Type to either wifi or carrier
- if (clonedMessage.context?.network?.wifi) {
- payload.c = 'wifi';
- } else {
- payload.c = 'carrier';
- }
- } else if (Config.match_id === 'advertisingId') {
- payload.match_id = clonedMessage?.context?.device?.advertisingId;
- } else if (message.properties.match_id) {
- payload.match_id = message.properties.match_id;
- }
- return { payload, eventAttributes };
-};
-
-module.exports = {
- generateRevenuePayloadArray,
- isSessionEvent,
- platformWisePayloadGenerator,
-};
diff --git a/src/v0/destinations/singular/util.ts b/src/v0/destinations/singular/util.ts
new file mode 100644
index 00000000000..6ec0444f53e
--- /dev/null
+++ b/src/v0/destinations/singular/util.ts
@@ -0,0 +1,404 @@
+import {
+ TransformationError,
+ InstrumentationError,
+ isEmptyObject,
+} from '@rudderstack/integrations-lib';
+import {
+ BASE_URL,
+ CONFIG_CATEGORIES,
+ CONFIG_CATEGORIES_V2,
+ MAPPING_CONFIG,
+ MAPPING_CONFIG_V2,
+ SINGULAR_SESSION_ANDROID_EXCLUSION,
+ SINGULAR_SESSION_IOS_EXCLUSION,
+ SINGULAR_EVENT_ANDROID_EXCLUSION,
+ SINGULAR_EVENT_IOS_EXCLUSION,
+ SUPPORTED_PLATFORM,
+ SUPPORTED_UNTIY_SUBPLATFORMS,
+ SESSIONEVENTS,
+ SINGULAR_V2_EVENT_ATTRIBUTES_EXCLUDED_KEYS,
+ PARTNER_OBJECT,
+ SESSION_ENDPOINT_PATH_V1,
+ EVENT_ENDPOINT_PATH_V2,
+ EVENT_ENDPOINT_PATH_V1,
+} from './config';
+import {
+ constructPayload,
+ defaultRequestConfig,
+ defaultGetRequestConfig,
+ removeUndefinedAndNullValues,
+ extractCustomFields,
+ getIntegrationsObj,
+ getValueFromMessage,
+ isDefinedAndNotNull,
+ isAppleFamily,
+} from '../../util';
+import type {
+ SingularMessage,
+ SingularDestinationConfig,
+ SingularProduct,
+ SingularEventType,
+ SingularPlatform,
+ SingularBatchRequest,
+ SingularRequestParams,
+ SingularSessionParams,
+ SingularEventParams,
+ SingularEndpointObject,
+} from './types';
+
+/**
+ * Extracts custom event attributes from message properties
+ * All fields in properties not directly mapped will be sent as custom event attributes
+ * @param message - RudderStack message
+ * @param EXCLUSION_FIELDS - Fields to exclude from extraction
+ * @returns Custom event attributes
+ */
+const extractExtraFields = (
+ message: SingularMessage,
+ EXCLUSION_FIELDS: readonly string[],
+): Record<string, any> => {
+ const eventAttributes: Record<string, any> = {};
+ extractCustomFields(message, eventAttributes, ['properties'], EXCLUSION_FIELDS);
+ return eventAttributes;
+};
+
+/**
+ * Generates an array of individual responses for each product in a revenue event
+ * @param products - Array of products
+ * @param payload - Common payload for each revenue event
+ * @param Config - Destination configuration
+ * @param eventAttributes - Optional custom event attributes
+ * @param eventEndpoint - Endpoint for event requests (caller passes BASE_URL_V1/evt or BASE_URL_V2/evt)
+ * @returns Array of revenue event batch requests
+ */
+const generateRevenuePayloadArray = (
+ products: SingularProduct[],
+ payload: SingularRequestParams,
+ Config: SingularDestinationConfig,
+ { endpoint, endpointPath }: SingularEndpointObject,
+): SingularBatchRequest[] =>
+ products.map((product) => {
+ const productDetails = constructPayload(
+ product,
+ MAPPING_CONFIG[CONFIG_CATEGORIES.PRODUCT_PROPERTY.name],
+ );
+ const finalPayload = removeUndefinedAndNullValues({
+ ...payload,
+ ...productDetails,
+ ...PARTNER_OBJECT,
+ a: Config.apiKey,
+ is_revenue_event: true,
+ }) as SingularEventParams;
+ return {
+ ...defaultRequestConfig(),
+ endpoint,
+ endpointPath,
+ params: finalPayload,
+ method: defaultGetRequestConfig.requestMethod,
+ };
+ });
+
+const exclusionList: Record<string, string[]> = {
+ ANDROID_SESSION_EXCLUSION_LIST: SINGULAR_SESSION_ANDROID_EXCLUSION,
+ IOS_SESSION_EXCLUSION_LIST: SINGULAR_SESSION_IOS_EXCLUSION,
+ ANDROID_EVENT_EXCLUSION_LIST: SINGULAR_EVENT_ANDROID_EXCLUSION,
+ IOS_EVENT_EXCLUSION_LIST: SINGULAR_EVENT_IOS_EXCLUSION,
+};
+
+/**
+ * Reads integrations.Singular.singularDeviceId from the message.
+ * Used for V2 event API version selection and sdid query param.
+ */
+const getSingularDeviceIdFromMessage = (message: SingularMessage): string | undefined => {
+ const integrationsObj = getIntegrationsObj(message, 'singular' as any);
+ const singularDeviceId = integrationsObj?.singularDeviceId;
+ return typeof singularDeviceId === 'string' && singularDeviceId.length > 0
+ ? singularDeviceId
+ : undefined;
+};
+
+/**
+ * True when the customer sends integrations.Singular.singularDeviceId (use V2 event API).
+ * Used only for non-session events; session events always use V1 launch.
+ */
+const shouldUseV2EventApi = (message: SingularMessage): boolean =>
+ getSingularDeviceIdFromMessage(message) !== undefined;
+
+/**
+ * Determines if the event is a session event
+ * @param Config - Destination configuration
+ * @param eventName - Event name to check
+ * @returns True if event is a session event, false otherwise
+ */
+const isSessionEvent = (Config: SingularDestinationConfig, eventName: string): boolean => {
+ const mappedSessionEvents = Config.sessionEventList?.map((item) => item.sessionEventName) ?? [];
+ return mappedSessionEvents.includes(eventName) || SESSIONEVENTS.includes(eventName.toLowerCase());
+};
+
+/**
+ * Reads integrations.Singular.limitDataSharing and returns data_sharing_options when it is a boolean.
+ * Used for both /launch and /evt API requests.
+ * @param message - RudderStack message
+ * @returns data_sharing_options object when limitDataSharing is boolean, otherwise undefined
+ */
+const getDataSharingOptionsFromMessage = (
+ message: SingularMessage,
+): { limit_data_sharing: boolean } | undefined => {
+ const integrationsObj = getIntegrationsObj(message, 'singular' as any);
+ const limitDataSharing = integrationsObj?.limitDataSharing;
+ if (typeof limitDataSharing === 'boolean') {
+ return { limit_data_sharing: limitDataSharing };
+ }
+ return undefined;
+};
+
+/**
+ * Builds base payload using platform-specific mapping configuration
+ * @param message - RudderStack message
+ * @param platform - Platform identifier (lowercased)
+ * @param eventType - 'SESSION' or 'EVENT'
+ * @returns Base payload from constructPayload
+ * @throws TransformationError if payload creation fails
+ */
+const buildBasePayload = (
+ message: SingularMessage,
+ platform: SingularPlatform,
+ eventType: SingularEventType,
+): Record<string, any> => {
+ const configKey = SUPPORTED_UNTIY_SUBPLATFORMS.includes(platform)
+ ? CONFIG_CATEGORIES[`${eventType}_UNITY`].name
+ : CONFIG_CATEGORIES[`${eventType}_${SUPPORTED_PLATFORM[platform]}`].name;
+
+ const basePayload: Record<string, any> | null = constructPayload(
+ message,
+ MAPPING_CONFIG[configKey],
+ ) as Record<string, any> | null;
+
+ if (!basePayload) {
+ throw new TransformationError(`Failed to Create ${platform} ${eventType} Payload`);
+ }
+
+ const dataSharingOptions = getDataSharingOptionsFromMessage(message);
+ return {
+ ...basePayload,
+ ...(dataSharingOptions && { data_sharing_options: dataSharingOptions }),
+ };
+};
+
+/**
+ * Builds base payload for V2 event API using data/v2 mapping configs (no platform device ids; sdid from integration options).
+ */
+const buildBasePayloadV2 = (
+ message: SingularMessage,
+ platform: SingularPlatform,
+): Record<string, any> => {
+ const configKey = SUPPORTED_UNTIY_SUBPLATFORMS.includes(platform)
+ ? CONFIG_CATEGORIES_V2.EVENT_UNITY.name
+ : CONFIG_CATEGORIES_V2[`EVENT_${SUPPORTED_PLATFORM[platform]}`].name;
+
+ const basePayload: Record<string, any> | null = constructPayload(
+ message,
+ MAPPING_CONFIG_V2[configKey],
+ ) as Record<string, any> | null;
+
+ if (!basePayload) {
+ throw new TransformationError(`Failed to Create ${platform} V2 EVENT Payload`);
+ }
+
+ const dataSharingOptions = getDataSharingOptionsFromMessage(message);
+ return {
+ ...basePayload,
+ ...(dataSharingOptions && { data_sharing_options: dataSharingOptions }),
+ };
+};
+
+/**
+ * Computes match_id value based on configuration and message
+ * Used for Unity platforms
+ * @param message - RudderStack message
+ * @param Config - Destination configuration
+ * @returns match_id value or undefined
+ */
+const getMatchObject = (
+ message: SingularMessage,
+ Config: SingularDestinationConfig,
+): { match_id: string } | undefined => {
+ if (Config.match_id === 'advertisingId' && message?.context?.device?.advertisingId) {
+ return { match_id: message?.context?.device?.advertisingId };
+ }
+ if (message.properties?.match_id) {
+ return { match_id: message.properties.match_id };
+ }
+ return undefined;
+};
+
+/**
+ * Determines connection type based on message context
+ * @param message - RudderStack message
+ * @returns 'wifi' if network.wifi is true, otherwise 'carrier'
+ */
+const getConnectionType = (message: SingularMessage): 'wifi' | 'carrier' =>
+ message.context?.network?.wifi ? 'wifi' : 'carrier';
+
+/**
+ * Creates a SESSION payload with session-specific parameters
+ * @param message - RudderStack message
+ * @param platform - Platform identifier (lowercased)
+ * @param Config - Destination configuration
+ * @returns SESSION payload conforming to SingularSessionParams
+ */
+const createSessionPayload = (
+ message: SingularMessage,
+ platform: SingularPlatform,
+ Config: SingularDestinationConfig,
+): SingularSessionParams => {
+ const payload = buildBasePayload(
+ message,
+ platform,
+ 'SESSION',
+ ) as unknown as SingularSessionParams;
+
+ if (!SUPPORTED_UNTIY_SUBPLATFORMS.includes(platform)) {
+ // context.device.adTrackingEnabled = true implies Singular's do not track (dnt) to be 0 and vice-versa.
+ const adTrackingEnabled = getValueFromMessage(message, 'context.device.adTrackingEnabled');
+
+ return {
+ ...payload,
+ ...(platform === 'android' && {
+ install_source: message.properties?.referring_application || '',
+ }),
+ dnt: adTrackingEnabled === true ? 0 : 1,
+ openuri: message.properties?.url || '',
+ c: getConnectionType(message),
+ };
+ }
+
+ const matchObject = getMatchObject(message, Config);
+ return { ...payload, ...matchObject };
+};
+
+/**
+ * Creates an EVENT payload with event-specific parameters and custom attributes (V1 API).
+ */
+const createEventPayload = (
+ message: SingularMessage,
+ platform: SingularPlatform,
+ Config: SingularDestinationConfig,
+): SingularEventParams => {
+ const payload = buildBasePayload(message, platform, 'EVENT') as unknown as SingularEventParams;
+
+ if (!SUPPORTED_UNTIY_SUBPLATFORMS.includes(platform)) {
+ const eventAttributes = removeUndefinedAndNullValues(
+ extractExtraFields(
+ message,
+ exclusionList[`${SUPPORTED_PLATFORM[platform]}_EVENT_EXCLUSION_LIST`],
+ ),
+ );
+ return {
+ ...payload,
+ c: getConnectionType(message),
+ ...(!isDefinedAndNotNull(payload.is_revenue_event) &&
+ payload.amt && { is_revenue_event: true }),
+ ...(!isEmptyObject(eventAttributes) && { e: eventAttributes }),
+ };
+ }
+ const matchObject = getMatchObject(message, Config);
+ return { ...payload, ...matchObject };
+};
+
+/**
+ * Creates a V2 EVENT payload using data/v2 mapping (no platform device ids; sdid from integration options).
+ * Event attributes (e) exclude singularDeviceId. match_id is preserved for Unity platforms (same as V1).
+ */
+const createV2EventPayload = (
+ message: SingularMessage,
+ platform: SingularPlatform,
+ Config: SingularDestinationConfig,
+): SingularEventParams => {
+ const basePayload = buildBasePayloadV2(message, platform);
+ const sdid = getSingularDeviceIdFromMessage(message);
+ const payload = { ...basePayload, ...(sdid && { sdid }) } as unknown as SingularEventParams;
+
+ if (!SUPPORTED_UNTIY_SUBPLATFORMS.includes(platform)) {
+ const v2Exclusion = [
+ ...exclusionList[`${SUPPORTED_PLATFORM[platform]}_EVENT_EXCLUSION_LIST`],
+ ...SINGULAR_V2_EVENT_ATTRIBUTES_EXCLUDED_KEYS,
+ ];
+ const eventAttributes = removeUndefinedAndNullValues(extractExtraFields(message, v2Exclusion));
+
+ return {
+ ...payload,
+ c: getConnectionType(message),
+ ...(!isDefinedAndNotNull(payload.is_revenue_event) &&
+ payload.amt && { is_revenue_event: true }),
+ ...(!isEmptyObject(eventAttributes) && { e: eventAttributes }),
+ };
+ }
+ const matchObject = getMatchObject(message, Config);
+ return { ...payload, ...matchObject };
+};
+
+/**
+ * Generates platform-specific payload for Singular API
+ * Handles both SESSION and EVENT payloads with appropriate parameters
+ * @param message - RudderStack message
+ * @param sessionEvent - Whether this is a session event
+ * @param Config - Destination configuration
+ * @returns Payload and optional event attributes for Singular API
+ * @throws InstrumentationError if platform is missing or unsupported
+ */
+const platformWisePayloadGenerator = (
+ message: SingularMessage,
+ sessionEvent: boolean,
+ Config: SingularDestinationConfig,
+): SingularEventParams | SingularSessionParams => {
+ const clonedMessage: SingularMessage = { ...message };
+ const contextOsName = getValueFromMessage(message, 'context.os.name');
+ if (!contextOsName || typeof contextOsName !== 'string') {
+ throw new InstrumentationError('Platform name is missing from context.os.name');
+ }
+
+ // checking if the os is one of ios, ipados, watchos, tvos
+ const isAppleOs = isAppleFamily(contextOsName.toLowerCase());
+ const normalizedOsName = isAppleOs ? 'iOS' : contextOsName;
+
+ clonedMessage.context!.os!.name = normalizedOsName;
+
+ const platform = normalizedOsName.toLowerCase() as SingularPlatform;
+ if (!SUPPORTED_PLATFORM[platform]) {
+ throw new InstrumentationError(`Platform ${platform} is not supported`);
+ }
+
+ if (sessionEvent) {
+ return createSessionPayload(clonedMessage, platform, Config);
+ }
+
+ if (shouldUseV2EventApi(clonedMessage)) {
+ return createV2EventPayload(clonedMessage, platform, Config);
+ }
+
+ return createEventPayload(clonedMessage, platform, Config);
+};
+
+/**
+ * Returns the Singular API endpoint for the given request type.
+ */
+const getEndpoint = (message: SingularMessage, sessionEvent: boolean): SingularEndpointObject => {
+ if (sessionEvent) {
+ return {
+ endpoint: `${BASE_URL}${SESSION_ENDPOINT_PATH_V1}`,
+ endpointPath: SESSION_ENDPOINT_PATH_V1,
+ };
+ }
+ return shouldUseV2EventApi(message)
+ ? {
+ endpoint: `${BASE_URL}${EVENT_ENDPOINT_PATH_V2}`,
+ endpointPath: EVENT_ENDPOINT_PATH_V2,
+ }
+ : {
+ endpoint: `${BASE_URL}${EVENT_ENDPOINT_PATH_V1}`,
+ endpointPath: EVENT_ENDPOINT_PATH_V1,
+ };
+};
+
+export { generateRevenuePayloadArray, getEndpoint, isSessionEvent, platformWisePayloadGenerator };
diff --git a/src/v0/destinations/tiktok_audience/config.ts b/src/v0/destinations/tiktok_audience/config.ts
new file mode 100644
index 00000000000..534db8d8d8c
--- /dev/null
+++ b/src/v0/destinations/tiktok_audience/config.ts
@@ -0,0 +1,18 @@
+import { EVENT_TYPES } from '../../util/recordUtils';
+
+export const ACTION_MAP: Record<string, string> = {
+ add: 'add',
+ remove: 'delete',
+};
+
+export const ACTION_RECORD_MAP: Record<string, string> = {
+ [EVENT_TYPES.INSERT]: 'add',
+ [EVENT_TYPES.UPDATE]: 'add',
+ [EVENT_TYPES.DELETE]: 'delete',
+};
+
+export const SHA256_TRAITS = ['IDFA_SHA256', 'AAID_SHA256', 'EMAIL_SHA256', 'PHONE_SHA256'];
+
+export const BASE_URL = 'https://business-api.tiktok.com/open_api/v1.3';
+export const ENDPOINT_PATH = '/segment/mapping/';
+export const ENDPOINT = `${BASE_URL}${ENDPOINT_PATH}`;
diff --git a/src/v0/destinations/tiktok_audience/recordTransform.ts b/src/v0/destinations/tiktok_audience/recordTransform.ts
new file mode 100644
index 00000000000..9e8c736dfb4
--- /dev/null
+++ b/src/v0/destinations/tiktok_audience/recordTransform.ts
@@ -0,0 +1,142 @@
+import md5 from 'md5';
+import { hashToSha256, InstrumentationError, formatZodError } from '@rudderstack/integrations-lib';
+import type {
+ TiktokAudienceRecordRequest,
+ IdentifiersPayload,
+ Identifier,
+ SegmentMappingPayload,
+ ProcessTiktokAudienceRecordsResponse,
+} from './recordTypes';
+import { TiktokAudienceRecordRouterRequestSchema } from './recordTypes';
+import { SHA256_TRAITS, ENDPOINT, ENDPOINT_PATH, ACTION_RECORD_MAP } from './config';
+import { defaultRequestConfig, getSuccessRespEvents, handleRtTfSingleEventError } from '../../util';
+
+function prepareIdentifiersPayload(event: TiktokAudienceRecordRequest): IdentifiersPayload {
+ const { message, connection, destination } = event;
+ const { isHashRequired, audienceId } = connection.config.destination;
+ const { advertiserId } = destination.Config;
+ const { action, identifiers } = message;
+
+ const hashIdentifier = (fieldName: string, value: string) => {
+ if (isHashRequired) {
+ if (SHA256_TRAITS.includes(fieldName)) {
+ return hashToSha256(value);
+ }
+ return md5(value);
+ }
+ return value;
+ };
+
+ const identifiersList: Identifier[] = [];
+ for (const [fieldName, value] of Object.entries(identifiers)) {
+ if (value) {
+ identifiersList.push({
+ id: hashIdentifier(fieldName, value),
+ audience_ids: [audienceId],
+ });
+ }
+ }
+
+ const payload: IdentifiersPayload = {
+ event,
+ batchIdentifiers: identifiersList,
+ idSchema: Object.keys(identifiers).sort(),
+ advertiserId,
+ action: ACTION_RECORD_MAP[action],
+ };
+ return payload;
+}
+
+function prepareSegmentMappingRequest(
+ payload: SegmentMappingPayload,
+ event: TiktokAudienceRecordRequest,
+) {
+ const accessToken = event.metadata?.secret?.accessToken;
+ const userId = event.message?.userId;
+
+ const response = defaultRequestConfig();
+ response.body.JSON = payload;
+ response.userId = userId;
+ response.endpoint = ENDPOINT;
+ response.endpointPath = ENDPOINT_PATH;
+ response.headers = {
+ 'Access-Token': accessToken,
+ 'Content-Type': 'application/json',
+ };
+ return response;
+}
+
+function validateAudienceRecordEvent(event: unknown): TiktokAudienceRecordRequest {
+ const result = TiktokAudienceRecordRouterRequestSchema.safeParse(event);
+ if (!result.success) {
+ throw new InstrumentationError(formatZodError(result.error));
+ }
+ return result.data;
+}
+
+function processTiktokAudienceRecords(events: unknown[]): ProcessTiktokAudienceRecordsResponse {
+ const recordResponse: ProcessTiktokAudienceRecordsResponse = {
+ failedResponses: [],
+ successfulResponses: [],
+ };
+ const groupedPayloads: {
+ advertiserId: string;
+ action: string;
+ idSchema: string[];
+ payloads: IdentifiersPayload[];
+ }[] = [];
+
+ for (const event of events) {
+ try {
+ const recordEvent = validateAudienceRecordEvent(event);
+ const identifiersPayload = prepareIdentifiersPayload(recordEvent);
+
+ const existingGroup = groupedPayloads.find(
+ (group) =>
+ group.advertiserId === identifiersPayload.advertiserId &&
+ group.action === identifiersPayload.action &&
+ group.idSchema.length === identifiersPayload.idSchema.length &&
+ group.idSchema.every((field, index) => field === identifiersPayload.idSchema[index]),
+ );
+
+ if (existingGroup) {
+ existingGroup.payloads.push(identifiersPayload);
+ } else {
+ groupedPayloads.push({
+ advertiserId: identifiersPayload.advertiserId,
+ action: identifiersPayload.action,
+ idSchema: identifiersPayload.idSchema,
+ payloads: [identifiersPayload],
+ });
+ }
+ } catch (error) {
+ recordResponse.failedResponses.push(handleRtTfSingleEventError(event, error, {}));
+ }
+ }
+
+ for (const group of groupedPayloads) {
+ try {
+ const batchIdentifiers = group.payloads.map((payload) => payload.batchIdentifiers);
+ const metadataList = group.payloads.map((payload) => payload.event.metadata);
+
+ const payload: SegmentMappingPayload = {
+ batch_data: batchIdentifiers,
+ id_schema: group.idSchema,
+ advertiser_ids: [group.advertiserId],
+ action: group.action,
+ };
+ const response = prepareSegmentMappingRequest(payload, group.payloads[0].event);
+
+ recordResponse.successfulResponses.push(
+ getSuccessRespEvents(response, metadataList, group.payloads[0].event.destination, true),
+ );
+ } catch (error) {
+ recordResponse.failedResponses.push(
+ ...group.payloads.map((payload) => handleRtTfSingleEventError(payload.event, error, {})),
+ );
+ }
+ }
+ return recordResponse;
+}
+
+export { processTiktokAudienceRecords };
diff --git a/src/v0/destinations/tiktok_audience/recordTypes.ts b/src/v0/destinations/tiktok_audience/recordTypes.ts
new file mode 100644
index 00000000000..f98e51139d1
--- /dev/null
+++ b/src/v0/destinations/tiktok_audience/recordTypes.ts
@@ -0,0 +1,87 @@
+import { z } from 'zod';
+import { RouterTransformationResponse } from '../../../types';
+
+const TiktokAudienceDestinationSchema = z
+ .object({
+ Config: z
+ .object({
+ advertiserId: z.string(),
+ })
+ .passthrough(),
+ })
+ .passthrough();
+
+const TiktokAudienceConnectionSchema = z
+ .object({
+ config: z
+ .object({
+ destination: z
+ .object({
+ isHashRequired: z.boolean(),
+ audienceId: z.string(),
+ })
+ .passthrough(),
+ })
+ .passthrough(),
+ })
+ .passthrough();
+
+const TiktokAudienceMessageSchema = z
+ .object({
+ type: z.enum(['record'], {
+ required_error: 'message Type is not present. Aborting message.',
+ }),
+ action: z.enum(['insert', 'delete', 'update'], {
+ required_error: 'action is not present. Aborting message.',
+ }),
+ userId: z.string().optional(),
+ identifiers: z.record(z.string(), z.string().nullable()),
+ fields: z.record(z.string(), z.string().nullable()),
+ })
+ .passthrough();
+
+const TiktokAudienceMetadataSchema = z
+ .object({
+ secret: z
+ .object({
+ accessToken: z.string(),
+ })
+ .passthrough(),
+ })
+ .passthrough();
+
+export const TiktokAudienceRecordRouterRequestSchema = z
+ .object({
+ message: TiktokAudienceMessageSchema,
+ destination: TiktokAudienceDestinationSchema,
+ connection: TiktokAudienceConnectionSchema,
+ metadata: TiktokAudienceMetadataSchema,
+ })
+ .passthrough();
+
+export type ProcessTiktokAudienceRecordsResponse = {
+ failedResponses: RouterTransformationResponse[];
+ successfulResponses: RouterTransformationResponse[];
+};
+
+export type Identifier = {
+ id: string;
+ audience_ids: string[];
+};
+
+export type IdentifiersPayload = {
+ event: TiktokAudienceRecordRequest;
+ batchIdentifiers: Identifier[];
+ idSchema: string[];
+ advertiserId: string;
+ action: string;
+};
+
+export type SegmentMappingPayload = {
+ batch_data: Identifier[][];
+ id_schema: string[];
+ advertiser_ids: string[];
+ action: string;
+};
+
+export type TiktokAudienceRecordRequest = z.infer<typeof TiktokAudienceRecordRouterRequestSchema>;
diff --git a/src/v0/destinations/tiktok_audience/transform.ts b/src/v0/destinations/tiktok_audience/transform.ts
new file mode 100644
index 00000000000..bd80819d520
--- /dev/null
+++ b/src/v0/destinations/tiktok_audience/transform.ts
@@ -0,0 +1,156 @@
+import md5 from 'md5';
+import {
+ hashToSha256,
+ InstrumentationError,
+ formatZodError,
+ groupByInBatches,
+} from '@rudderstack/integrations-lib';
+import type { RouterTransformationResponse } from '../../../types';
+import type { TiktokAudienceListRequest } from './types';
+import { TiktokAudienceListRouterRequestSchema } from './types';
+import { SHA256_TRAITS, ACTION_MAP, ENDPOINT, ENDPOINT_PATH } from './config';
+import {
+ defaultRequestConfig,
+ getDestinationExternalIDInfoForRetl,
+ getSuccessRespEvents,
+ handleRtTfSingleEventError,
+} from '../../util';
+import { processTiktokAudienceRecords } from './recordTransform';
+import { ProcessTiktokAudienceRecordsResponse, TiktokAudienceRecordRequest } from './recordTypes';
+
+function prepareIdentifiersList(event: TiktokAudienceListRequest) {
+ const { message, destination, metadata } = event;
+ const { isHashRequired } = destination.Config;
+
+ const destinationFields =
+ message.context?.destinationFields
+ ?.split(',')
+ .map((s) => s.trim())
+ .filter(Boolean) ?? [];
+ const audienceId = getDestinationExternalIDInfoForRetl(message, 'TIKTOK_AUDIENCE').objectType;
+ const advertiserIds = metadata?.secret?.advertiserIds;
+ const hashIdentifier = (destinationField: string, trait: string) => {
+ if (isHashRequired) {
+ if (SHA256_TRAITS.includes(destinationField)) {
+ return hashToSha256(trait);
+ }
+ return md5(trait);
+ }
+ return trait;
+ };
+
+ const hashTraits = (traits: Record<string, string | null>[]) =>
+ traits.map((trait) =>
+ destinationFields.map((destinationField) =>
+ trait[destinationField]
+ ? {
+ id: hashIdentifier(destinationField, trait[destinationField]!),
+ audience_ids: [audienceId],
+ }
+ : {},
+ ),
+ );
+
+ const listData = message.properties!.listData!;
+ const actions = Object.keys(listData);
+
+ return actions.map((action) => ({
+ batch_data: hashTraits(listData[action]),
+ id_schema: destinationFields,
+ advertiser_ids: advertiserIds,
+ action: ACTION_MAP[action],
+ }));
+}
+
+function buildResponseForProcessTransformation(
+ identifiersList: any[],
+ event: TiktokAudienceListRequest,
+) {
+ const accessToken = event.metadata?.secret?.accessToken;
+ const anonymousId = event.message?.anonymousId;
+
+ const responses = identifiersList.map((identifierList) => {
+ const response = defaultRequestConfig();
+ response.body.JSON = identifierList;
+ response.userId = anonymousId;
+ response.endpoint = ENDPOINT;
+ response.endpointPath = ENDPOINT_PATH;
+ response.headers = {
+ 'Access-Token': accessToken,
+ 'Content-Type': 'application/json',
+ };
+ return response;
+ });
+ if (responses.length === 1) {
+ return responses[0];
+ }
+ return responses;
+}
+
+function validateAudienceListEvent(event: unknown) {
+ const result = TiktokAudienceListRouterRequestSchema.safeParse(event);
+ if (!result.success) {
+ throw new InstrumentationError(formatZodError(result.error));
+ }
+ return result.data;
+}
+
+function processTiktokAudienceList(event: TiktokAudienceListRequest) {
+ const identifierLists = prepareIdentifiersList(event);
+ return buildResponseForProcessTransformation(identifierLists, event);
+}
+
+const processRouterDest = async (
+ events: (TiktokAudienceListRequest | TiktokAudienceRecordRequest)[],
+): Promise<RouterTransformationResponse[]> => {
+ if (!events || events.length === 0) return [];
+
+ const groupedEvents = await groupByInBatches<
+ TiktokAudienceListRequest | TiktokAudienceRecordRequest,
+ string
+ >(events, (event) => event.message?.type?.toLowerCase());
+
+ const supportedEventTypes = ['record', 'audiencelist'];
+ const eventTypes = Object.keys(groupedEvents);
+ const unsupportedEventList = eventTypes.filter(
+ (eventType) => !supportedEventTypes.includes(eventType),
+ );
+
+ const failedResponses: RouterTransformationResponse[] = [];
+ const successfulResponses: RouterTransformationResponse[] = [];
+
+ if (groupedEvents.record) {
+ const response: ProcessTiktokAudienceRecordsResponse = processTiktokAudienceRecords(
+ groupedEvents.record,
+ );
+ failedResponses.push(...response.failedResponses);
+ successfulResponses.push(...response.successfulResponses);
+ }
+ if (groupedEvents.audiencelist) {
+ for (const event of groupedEvents.audiencelist) {
+ try {
+ const tiktokEvent = validateAudienceListEvent(event);
+ const response = processTiktokAudienceList(tiktokEvent);
+ successfulResponses.push(
+ getSuccessRespEvents(response, [tiktokEvent.metadata], tiktokEvent.destination, true),
+ );
+ } catch (error) {
+ failedResponses.push(handleRtTfSingleEventError(event, error, {}));
+ }
+ }
+ }
+ for (const unsupportedEvent of unsupportedEventList) {
+ for (const event of groupedEvents[unsupportedEvent]) {
+ failedResponses.push(
+ handleRtTfSingleEventError(
+ event,
+ new InstrumentationError(`unsupported event found ${unsupportedEvent}`),
+ {},
+ ),
+ );
+ }
+ }
+ return [...failedResponses, ...successfulResponses];
+};
+
+export { processRouterDest };
diff --git a/src/v0/destinations/tiktok_audience/types.ts b/src/v0/destinations/tiktok_audience/types.ts
new file mode 100644
index 00000000000..5a509a43dae
--- /dev/null
+++ b/src/v0/destinations/tiktok_audience/types.ts
@@ -0,0 +1,81 @@
+import { z } from 'zod';
+import { ACTION_MAP } from './config';
+
+const TiktokAudienceDestinationSchema = z
+ .object({
+ Config: z
+ .object({
+ isHashRequired: z.boolean(),
+ })
+ .passthrough(),
+ })
+ .passthrough();
+
+const TiktokAudienceMessageSchema = z
+ .object({
+ type: z.enum(['audienceList'], {
+ required_error: 'message Type is not present. Aborting message.',
+ }),
+ anonymousId: z.string().optional(),
+ properties: z
+ .object({
+ listData: z
+ .record(z.array(z.record(z.string(), z.string().nullable())))
+ .optional()
+ .superRefine((val, ctx) => {
+ if (!val) {
+ ctx.addIssue({
+ code: z.ZodIssueCode.custom,
+ message: 'listData is not present inside properties. Aborting message.',
+ });
+ }
+ for (const key of Object.keys(val ?? {})) {
+ if (!Object.keys(ACTION_MAP).includes(key)) {
+ ctx.addIssue({
+ code: z.ZodIssueCode.custom,
+ message: `unsupported action type ${key}. Aborting message.`,
+ });
+ break;
+ }
+ }
+ }),
+ })
+ .optional()
+ .refine((val) => val !== undefined, {
+ message: 'Message properties is not present. Aborting message.',
+ }),
+ context: z
+ .object({
+ destinationFields: z.string().optional(),
+ externalId: z
+ .array(
+ z.object({
+ type: z.string(),
+ }),
+ )
+ .optional(),
+ })
+ .optional(),
+ })
+ .passthrough();
+
+const TiktokAudienceMetadataSchema = z
+ .object({
+ secret: z
+ .object({
+ advertiserIds: z.array(z.string()),
+ accessToken: z.string(),
+ })
+ .passthrough(),
+ })
+ .passthrough();
+
+export const TiktokAudienceListRouterRequestSchema = z
+ .object({
+ message: TiktokAudienceMessageSchema,
+ destination: TiktokAudienceDestinationSchema,
+ metadata: TiktokAudienceMetadataSchema,
+ })
+ .passthrough();
+
+export type TiktokAudienceListRequest = z.infer<typeof TiktokAudienceListRouterRequestSchema>;
diff --git a/src/v0/util/audienceUtils.ts b/src/v0/util/audienceUtils.ts
new file mode 100644
index 00000000000..03c1e63b311
--- /dev/null
+++ b/src/v0/util/audienceUtils.ts
@@ -0,0 +1,61 @@
+import { InstrumentationError } from '@rudderstack/integrations-lib';
+import stats from '../../util/stats';
+
+const HASHED_VALUE_REGEX = /^[\da-f]{64}$/;
+
+interface AudienceDestination {
+ workspaceId: string;
+ id: string;
+ type: string;
+ config: {
+ isHashRequired: boolean;
+ };
+}
+
+function isHashingValidationEnabled(): boolean {
+ return process.env.AUDIENCE_HASHING_VALIDATION_ENABLED === 'true';
+}
+
+/**
+ * Validates that the hashing configuration is consistent with the actual data.
+ * Emits a metric when inconsistency is detected.
+ * Optionally throws an error when validation is enabled via env var AUDIENCE_HASHING_VALIDATION_ENABLED.
+ */
+export const validateHashingConsistency = (
+ propertyName: string,
+ normalizedValue: string,
+ destination: AudienceDestination,
+): void => {
+ if (!normalizedValue) return;
+ const { workspaceId, id: destinationId, type: destType, config } = destination;
+ const { isHashRequired } = config;
+ const isAlreadyHashed = HASHED_VALUE_REGEX.test(normalizedValue);
+ if (isHashRequired && isAlreadyHashed) {
+ stats.increment('audience_hashing_inconsistency', {
+ propertyName,
+ type: 'hashed_when_hash_enabled',
+ workspaceId,
+ destinationId,
+ destType,
+ });
+ if (isHashingValidationEnabled()) {
+ throw new InstrumentationError(
+ `Hashing is enabled but the value for field ${propertyName} appears to already be hashed. Either disable hashing or send unhashed data.`,
+ );
+ }
+ }
+ if (!isHashRequired && !isAlreadyHashed) {
+ stats.increment('audience_hashing_inconsistency', {
+ propertyName,
+ type: 'unhashed_when_hash_disabled',
+ workspaceId,
+ destinationId,
+ destType,
+ });
+ if (isHashingValidationEnabled()) {
+ throw new InstrumentationError(
+ `Hashing is disabled but the value for field ${propertyName} appears to be unhashed. Either enable hashing or send pre-hashed data.`,
+ );
+ }
+ }
+};
diff --git a/src/v0/util/errorTypes/deleteUsersError.ts b/src/v0/util/errorTypes/deleteUsersError.ts
new file mode 100644
index 00000000000..7b5328e671d
--- /dev/null
+++ b/src/v0/util/errorTypes/deleteUsersError.ts
@@ -0,0 +1,27 @@
+import { BaseError } from '@rudderstack/integrations-lib';
+
+/**
+ * DeleteUsersError wraps a BaseError to support separate messages
+ * for client response and logging/instrumentation.
+ *
+ * Use this when the error message contains PII that should not be logged,
+ * but needs to be returned to the client for debugging.
+ */
+export class DeleteUsersError extends BaseError {
+ logMessage: string;
+
+ /**
+ * @param baseError - The underlying error object
+ * @param logMessage - Sanitized message for logging (no PII)
+ */
+ constructor(baseError: BaseError, logMessage: string) {
+ super(
+ baseError.message,
+ baseError.status,
+ baseError.statTags,
+ baseError.destinationResponse,
+ baseError.authErrorCategory,
+ );
+ this.logMessage = logMessage;
+ }
+}
diff --git a/src/v0/util/errorTypes/index.js b/src/v0/util/errorTypes/index.js
index f3cef657cad..7f68742b5d7 100644
--- a/src/v0/util/errorTypes/index.js
+++ b/src/v0/util/errorTypes/index.js
@@ -1,7 +1,9 @@
const FilteredEventsError = require('./filteredEventsError');
const TransformerProxyError = require('./transformerProxyError');
+const { DeleteUsersError } = require('./deleteUsersError');
module.exports = {
+ DeleteUsersError,
FilteredEventsError,
TransformerProxyError,
};
diff --git a/src/v0/util/index.js b/src/v0/util/index.js
index 1bbb1b74478..860a2a1f3f6 100644
--- a/src/v0/util/index.js
+++ b/src/v0/util/index.js
@@ -75,11 +75,15 @@ const removeUndefinedAndNullRecurse = (obj) => {
};
const getEventTime = (message) => {
- try {
- return new Date(message.timestamp).toISOString();
- } catch (err) {
- return new Date(message.originalTimestamp).toISOString();
+ const timestamp = new Date(message.timestamp);
+ if (!Number.isNaN(timestamp.getTime())) {
+ return timestamp.toISOString();
+ }
+ const originalTimestamp = new Date(message.originalTimestamp);
+ if (!Number.isNaN(originalTimestamp.getTime())) {
+ return originalTimestamp.toISOString();
}
+ throw new InstrumentationError('Neither timestamp nor originalTimestamp is a valid date.');
};
const base64Convertor = (string) => Buffer.from(string).toString('base64');
diff --git a/src/v0/util/recordUtils.js b/src/v0/util/recordUtils.js
index 2df8a55ed0b..7af496d71c9 100644
--- a/src/v0/util/recordUtils.js
+++ b/src/v0/util/recordUtils.js
@@ -1,7 +1,11 @@
const { InstrumentationError } = require('@rudderstack/integrations-lib');
const { generateErrorObject, getErrorRespEvents } = require('./index');
-const eventTypes = ['update', 'insert', 'delete'];
+const EVENT_TYPES = {
+ INSERT: 'insert',
+ DELETE: 'delete',
+ UPDATE: 'update',
+};
function getErrorMetaData(inputs, acceptedOperations) {
const metadata = [];
@@ -18,7 +22,7 @@ function getErrorMetaData(inputs, acceptedOperations) {
function getErrorResponse(groupedRecordsByAction) {
const errorMetaData = [];
- const errorMetaDataObject = getErrorMetaData(groupedRecordsByAction, eventTypes);
+ const errorMetaDataObject = getErrorMetaData(groupedRecordsByAction, Object.values(EVENT_TYPES));
if (errorMetaDataObject.length > 0) {
errorMetaData.push(errorMetaDataObject);
}
@@ -52,4 +56,5 @@ function createFinalResponse(deleteResponse, insertResponse, updateResponse, err
module.exports = {
getErrorResponse,
createFinalResponse,
+ EVENT_TYPES,
};
diff --git a/src/v0/util/regulation-api.js b/src/v0/util/regulation-api.js
index 238993ee811..4924d5cab86 100644
--- a/src/v0/util/regulation-api.js
+++ b/src/v0/util/regulation-api.js
@@ -4,7 +4,7 @@ const RegulationApiUtils = {
/**
* Common validations that are part of `deleteUsers` api would be defined here
*
- * @param {Array<{ userId:string, email:string, phone:string}>} userAttributes Array of objects with userId, email and phone
+ * @param {Array<object>} userAttributes Array of objects with userId, email and phone
*/
executeCommonValidations(userAttributes) {
if (!Array.isArray(userAttributes)) {
diff --git a/src/v0/util/tags.js b/src/v0/util/tags.js
index 1fdb5ddef2a..e5675558ea6 100644
--- a/src/v0/util/tags.js
+++ b/src/v0/util/tags.js
@@ -48,7 +48,6 @@ const ERROR_TYPES = {
THROTTLED: 'throttled',
RETRYABLE: 'retryable',
ABORTED: 'aborted',
- OAUTH_SECRET: 'oAuthSecret',
UNSUPPORTED: 'unsupported',
REDIS: 'redis',
FILTERED: 'filtered',
diff --git a/src/v1/destinations/hs/networkHandler.ts b/src/v1/destinations/hs/networkHandler.ts
index 8dd22b465f8..d02714185d2 100644
--- a/src/v1/destinations/hs/networkHandler.ts
+++ b/src/v1/destinations/hs/networkHandler.ts
@@ -1,7 +1,12 @@
import { TransformerProxyError } from '../../../v0/util/errorTypes';
import { prepareProxyRequest, proxyRequest } from '../../../adapters/network';
import { isHttpStatusSuccess, getAuthErrCategoryFromStCode } from '../../../v0/util/index';
-import { DeliveryV1Response, DeliveryJobState } from '../../../types/index';
+import {
+ DeliveryV1Response,
+ DeliveryJobState,
+ ProxyMetdata,
+ ProxyV1Request,
+} from '../../../types/index';
import { processAxiosResponse, getDynamicErrorType } from '../../../adapters/utils/networkUtils';
@@ -63,7 +68,87 @@ type Response = {
failureMessages?: Array