Skip to content

Commit f77b0bc

Browse files
abhandage authored and claude committed
Fix inconsistent field ordering in LiveRamp CSV generation
Previously, the order of fields in generated CSV files depended on the order payloads were processed, leading to inconsistent column positions across different batches sent to LiveRamp.

Changes:
- Modified generateFile() to sort fields alphabetically (after audience_key)
- Ensures consistent field ordering regardless of payload order
- audience_key always remains the first column
- All other fields are sorted alphabetically for predictability

Testing:
- Added 2 new test cases demonstrating consistent field ordering
- Updated 8 existing test cases with new alphabetically sorted expectations
- All 36 tests passing

Ticket: STRATCONN-6533

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
1 parent e7e0f25 commit f77b0bc

File tree

2 files changed

+213
-40
lines changed

2 files changed

+213
-40
lines changed

packages/destination-actions/src/destinations/liveramp-audiences/__tests__/operations.test.ts

Lines changed: 205 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -110,9 +110,9 @@ describe('Test operations', () => {
110110
const normalizedName = normalize('name', 'John Doe')
111111
const hashedName = processHashing(normalizedName, 'sha256', 'hex')
112112
const result = generateFile(payloads)
113-
const expected = `audience_key,name,email\n${enquoteIdentifier('1002')},${enquoteIdentifier(
114-
hashedName
115-
)},${enquoteIdentifier('john@example.com')}`
113+
const expected = `audience_key,email,name\n${enquoteIdentifier('1002')},${enquoteIdentifier(
114+
'john@example.com'
115+
)},${enquoteIdentifier(hashedName)}`
116116
expect(result.fileContents.toString()).toBe(expected)
117117
})
118118

@@ -138,11 +138,11 @@ describe('Test operations', () => {
138138
const hashedAlice = processHashing('Alice', 'sha256', 'hex', (value: string) => normalize('name', value))
139139
const hashedBob = processHashing('Bob', 'sha256', 'hex', (value: string) => normalize('name', value))
140140
const result = generateFile(payloads)
141-
const expected = `audience_key,name,email\n${enquoteIdentifier('1003')},${enquoteIdentifier(
142-
hashedAlice
143-
)},${enquoteIdentifier('alice@example.com')}\n${enquoteIdentifier('1004')},${enquoteIdentifier(
144-
hashedBob
145-
)},${enquoteIdentifier('bob@example.com')}`
141+
const expected = `audience_key,email,name\n${enquoteIdentifier('1003')},${enquoteIdentifier(
142+
'alice@example.com'
143+
)},${enquoteIdentifier(hashedAlice)}\n${enquoteIdentifier('1004')},${enquoteIdentifier(
144+
'bob@example.com'
145+
)},${enquoteIdentifier(hashedBob)}`
146146
expect(result.fileContents.toString()).toBe(expected)
147147
})
148148

@@ -176,9 +176,9 @@ describe('Test operations', () => {
176176
]
177177
const hashedNote = processHashing('Hello, "John"\nNew line', 'sha256', 'hex')
178178
const result = generateFile(payloads)
179-
const expected = `audience_key,note,email\n${enquoteIdentifier('1006')},${enquoteIdentifier(
180-
hashedNote
181-
)},${enquoteIdentifier('test@example.com')}`
179+
const expected = `audience_key,email,note\n${enquoteIdentifier('1006')},${enquoteIdentifier(
180+
'test@example.com'
181+
)},${enquoteIdentifier(hashedNote)}`
182182
expect(result.fileContents.toString()).toBe(expected)
183183
})
184184

@@ -345,9 +345,9 @@ describe('Test operations', () => {
345345
}
346346
]
347347
const result = generateFile(payloads)
348-
const expected = `audience_key,name,email\n${enquoteIdentifier('1011')},"",${enquoteIdentifier(
348+
const expected = `audience_key,email,name\n${enquoteIdentifier('1011')},${enquoteIdentifier(
349349
'test@example.com'
350-
)}`
350+
)},""`
351351
expect(result.fileContents.toString()).toBe(expected)
352352
})
353353

@@ -454,11 +454,11 @@ describe('Test operations', () => {
454454
const result = generateFile(payloads)
455455

456456
const expected = [
457-
`audience_key,first_name,email,liveramp_test`,
458-
`${enquoteIdentifier('test_audience')},${enquoteIdentifier('liveramp 01')},${enquoteIdentifier(
459-
'liveramp-test-01@gmailx.com'
457+
`audience_key,email,first_name,liveramp_test`,
458+
`${enquoteIdentifier('test_audience')},${enquoteIdentifier('liveramp-test-01@gmailx.com')},${enquoteIdentifier(
459+
'liveramp 01'
460460
)},${enquoteIdentifier('true')}`,
461-
`${enquoteIdentifier('test_audience')},,${enquoteIdentifier('liveramp-test-02@gmailx.com')},${enquoteIdentifier(
461+
`${enquoteIdentifier('test_audience')},${enquoteIdentifier('liveramp-test-02@gmailx.com')},,${enquoteIdentifier(
462462
'true'
463463
)}`
464464
].join('\n')
@@ -497,11 +497,11 @@ describe('Test operations', () => {
497497
)
498498
const result = generateFile(payloads)
499499

500-
// Expected headers are audience_key, first_name, email, liveramp_test, unique_value
500+
// Expected headers are audience_key, email, first_name, liveramp_test, unique_value (alphabetically sorted)
501501
const expected = [
502-
`audience_key,first_name,email,liveramp_test,unique_value`,
503-
`${enquoteIdentifier('test_audience')},${enquoteIdentifier('liveramp 01')},${enquoteIdentifier(
504-
'liveramp-test-01@gmailx.com'
502+
`audience_key,email,first_name,liveramp_test,unique_value`,
503+
`${enquoteIdentifier('test_audience')},${enquoteIdentifier('liveramp-test-01@gmailx.com')},${enquoteIdentifier(
504+
'liveramp 01'
505505
)},${enquoteIdentifier('true')},`,
506506
`${enquoteIdentifier('test_audience')},,,${enquoteIdentifier('true')},${enquoteIdentifier(hashedUniqueValue)}` // Row with unique_value
507507
].join('\n')
@@ -583,14 +583,14 @@ describe('Test operations', () => {
583583
)
584584

585585
const expected = [
586-
`audience_key,first_name,email,liveramp_test,country,unique_value`,
587-
`${enquoteIdentifier('test_audience')},${enquoteIdentifier('liveramp 01')},${enquoteIdentifier(
588-
'liveramp-test-01@gmailx.com'
589-
)},${enquoteIdentifier('true')},,`,
590-
`${enquoteIdentifier('test_audience')},,${enquoteIdentifier('liveramp-test-02@gmailx.com')},${enquoteIdentifier(
591-
'true'
592-
)},${enquoteIdentifier(hashedCountry)},`,
593-
`${enquoteIdentifier('test_audience')},,,${enquoteIdentifier('true')},,${enquoteIdentifier(hashedUniqueValue)}`,
586+
`audience_key,country,email,first_name,liveramp_test,unique_value`,
587+
`${enquoteIdentifier('test_audience')},,${enquoteIdentifier('liveramp-test-01@gmailx.com')},${enquoteIdentifier(
588+
'liveramp 01'
589+
)},${enquoteIdentifier('true')},`,
590+
`${enquoteIdentifier('test_audience')},${enquoteIdentifier(hashedCountry)},${enquoteIdentifier(
591+
'liveramp-test-02@gmailx.com'
592+
)},,${enquoteIdentifier('true')},`,
593+
`${enquoteIdentifier('test_audience')},,,,${enquoteIdentifier('true')},${enquoteIdentifier(hashedUniqueValue)}`,
594594
`${enquoteIdentifier('test_audience')},,,,,`
595595
].join('\n')
596596
expect(result.fileContents.toString()).toBe(expected)
@@ -693,15 +693,185 @@ describe('Test operations', () => {
693693
const result = generateFile(payloads)
694694

695695
const expected = [
696-
'audience_key,FIRSTNAME,LASTNAME,ADDRESS1,ADDRESS2,CITY,STATE,ZIP,SHOPPERSCORE,LOVESDOGS,UNDER25,FAVORITECOLOR',
697-
'"35938495","Jane","Doe","100 Main St","Apt. A","Anytown","CA","123454545","54","1","1","Green"',
698-
'"103578302","John","Dough","123 Any St",,"Anytown","CA","123456565","87","1",,"Blue"',
699-
'"902833740","Sam","Sample","555 New Rd","Fl 17","Mysteryville","OK","957352436","36",,"1","Red"',
700-
'"328697301","Sarah","Sampel","987 Imaginary Ln",,"Buffetown","MI","436237235","99",,,"Blue"',
701-
'"993802274","Dolly","Data","456 Center Ave",,"Newtown","NE","586452778","12","1",,"Yellow"'
696+
'audience_key,ADDRESS1,ADDRESS2,CITY,FAVORITECOLOR,FIRSTNAME,LASTNAME,LOVESDOGS,SHOPPERSCORE,STATE,UNDER25,ZIP',
697+
'"35938495","100 Main St","Apt. A","Anytown","Green","Jane","Doe","1","54","CA","1","123454545"',
698+
'"103578302","123 Any St",,"Anytown","Blue","John","Dough","1","87","CA",,"123456565"',
699+
'"902833740","555 New Rd","Fl 17","Mysteryville","Red","Sam","Sample",,"36","OK","1","957352436"',
700+
'"328697301","987 Imaginary Ln",,"Buffetown","Blue","Sarah","Sampel",,"99","MI",,"436237235"',
701+
'"993802274","456 Center Ave",,"Newtown","Yellow","Dolly","Data","1","12","NE",,"586452778"'
702702
].join('\n')
703703

704704
expect(result.fileContents.toString()).toBe(expected)
705705
})
706+
707+
it('maintains consistent field order regardless of payload order', () => {
708+
// First batch: payload1 has fields A, B, C and payload2 has fields D, E, F
709+
const batch1: Payload[] = [
710+
{
711+
audience_key: 'user1',
712+
identifier_data: {
713+
field_a: 'value_a',
714+
field_b: 'value_b',
715+
field_c: 'value_c'
716+
},
717+
delimiter: ',',
718+
filename: 'output.csv',
719+
enable_batching: true
720+
},
721+
{
722+
audience_key: 'user2',
723+
identifier_data: {
724+
field_d: 'value_d',
725+
field_e: 'value_e',
726+
field_f: 'value_f'
727+
},
728+
delimiter: ',',
729+
filename: 'output.csv',
730+
enable_batching: true
731+
}
732+
]
733+
734+
// Second batch: same payloads but in REVERSE order
735+
const batch2: Payload[] = [
736+
{
737+
audience_key: 'user2',
738+
identifier_data: {
739+
field_d: 'value_d',
740+
field_e: 'value_e',
741+
field_f: 'value_f'
742+
},
743+
delimiter: ',',
744+
filename: 'output.csv',
745+
enable_batching: true
746+
},
747+
{
748+
audience_key: 'user1',
749+
identifier_data: {
750+
field_a: 'value_a',
751+
field_b: 'value_b',
752+
field_c: 'value_c'
753+
},
754+
delimiter: ',',
755+
filename: 'output.csv',
756+
enable_batching: true
757+
}
758+
]
759+
760+
const result1 = generateFile(batch1)
761+
const result2 = generateFile(batch2)
762+
763+
// Fields should now be alphabetically sorted: audience_key,field_a,field_b,field_c,field_d,field_e,field_f
764+
const expected = [
765+
'audience_key,field_a,field_b,field_c,field_d,field_e,field_f',
766+
'"user1","value_a","value_b","value_c",,,',
767+
'"user2",,,,"value_d","value_e","value_f"'
768+
].join('\n')
769+
770+
const expected2 = [
771+
'audience_key,field_a,field_b,field_c,field_d,field_e,field_f',
772+
'"user2",,,,"value_d","value_e","value_f"',
773+
'"user1","value_a","value_b","value_c",,,'
774+
].join('\n')
775+
776+
expect(result1.fileContents.toString()).toBe(expected)
777+
expect(result2.fileContents.toString()).toBe(expected2)
778+
779+
// Field order should now be consistent (same headers)
780+
const headers1 = result1.fileContents.toString().split('\n')[0]
781+
const headers2 = result2.fileContents.toString().split('\n')[0]
782+
expect(headers1).toBe(headers2)
783+
})
784+
785+
it('maintains consistent field order with overlapping and unique fields', () => {
786+
// Scenario: Multiple payloads with some shared fields and some unique fields
787+
// Fields should be alphabetically sorted regardless of payload order
788+
789+
const batch1: Payload[] = [
790+
{
791+
audience_key: 'user1',
792+
identifier_data: {
793+
email: 'user1@example.com',
794+
first_name: 'John',
795+
age: '30'
796+
},
797+
delimiter: ',',
798+
filename: 'output.csv',
799+
enable_batching: true
800+
},
801+
{
802+
audience_key: 'user2',
803+
identifier_data: {
804+
email: 'user2@example.com',
805+
last_name: 'Doe',
806+
city: 'NYC'
807+
},
808+
delimiter: ',',
809+
filename: 'output.csv',
810+
enable_batching: true
811+
},
812+
{
813+
audience_key: 'user3',
814+
identifier_data: {
815+
email: 'user3@example.com',
816+
phone: '555-1234',
817+
country: 'USA'
818+
},
819+
delimiter: ',',
820+
filename: 'output.csv',
821+
enable_batching: true
822+
}
823+
]
824+
825+
// Same data but user3 comes first
826+
const batch2: Payload[] = [
827+
{
828+
audience_key: 'user3',
829+
identifier_data: {
830+
email: 'user3@example.com',
831+
phone: '555-1234',
832+
country: 'USA'
833+
},
834+
delimiter: ',',
835+
filename: 'output.csv',
836+
enable_batching: true
837+
},
838+
{
839+
audience_key: 'user1',
840+
identifier_data: {
841+
email: 'user1@example.com',
842+
first_name: 'John',
843+
age: '30'
844+
},
845+
delimiter: ',',
846+
filename: 'output.csv',
847+
enable_batching: true
848+
},
849+
{
850+
audience_key: 'user2',
851+
identifier_data: {
852+
email: 'user2@example.com',
853+
last_name: 'Doe',
854+
city: 'NYC'
855+
},
856+
delimiter: ',',
857+
filename: 'output.csv',
858+
enable_batching: true
859+
}
860+
]
861+
862+
const result1 = generateFile(batch1)
863+
const result2 = generateFile(batch2)
864+
865+
// Both should have alphabetically sorted fields: audience_key,age,city,country,email,first_name,last_name,phone
866+
const headers1 = result1.fileContents.toString().split('\n')[0]
867+
const headers2 = result2.fileContents.toString().split('\n')[0]
868+
869+
// Field order should now be consistent
870+
expect(headers1).toBe(headers2)
871+
872+
// Verify the specific alphabetically sorted field order
873+
expect(headers1).toBe('audience_key,age,city,country,email,first_name,last_name,phone')
874+
expect(headers2).toBe('audience_key,age,city,country,email,first_name,last_name,phone')
875+
})
706876
})
707877
})

packages/destination-actions/src/destinations/liveramp-audiences/operations.ts

Lines changed: 8 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,10 @@ function generateFile(payloads: s3Payload[] | sftpPayload[]) {
5151
}
5252

5353
// Convert headers to an ordered array for consistent indexing
54+
// Sort alphabetically (excluding audience_key which is always first) to ensure consistent field order
5455
const headerArray = Array.from(headers)
56+
const otherHeaders = headerArray.filter((h) => h !== 'audience_key').sort()
57+
const sortedHeaderArray = ['audience_key', ...otherHeaders]
5558

5659
// Declare rows as an empty Buffer
5760
let rows = Buffer.from('')
@@ -60,9 +63,9 @@ function generateFile(payloads: s3Payload[] | sftpPayload[]) {
6063
for (let i = 0; i < payloads.length; i++) {
6164
const payload = payloads[i]
6265
// Initialize row with empty strings aligned with header count
63-
const row: string[] = new Array(headerArray.length).fill('')
66+
const row: string[] = new Array(sortedHeaderArray.length).fill('')
6467

65-
row[headerArray.indexOf('audience_key')] = enquoteIdentifier(payload.audience_key)
68+
row[sortedHeaderArray.indexOf('audience_key')] = enquoteIdentifier(payload.audience_key)
6669

6770
// Using a set to keep track of unhashed_identifier_data keys that have already been processed
6871
// This guarantees that when both hashed and unhashed keys share the same key-value pair the unhashed one
@@ -72,7 +75,7 @@ function generateFile(payloads: s3Payload[] | sftpPayload[]) {
7275
// Process unhashed_identifier_data first
7376
if (payload.unhashed_identifier_data) {
7477
for (const key of Object.keys(payload.unhashed_identifier_data)) {
75-
const index = headerArray.indexOf(key)
78+
const index = sortedHeaderArray.indexOf(key)
7679
unhashedKeys.add(key)
7780
/*Identifiers need to be hashed according to LiveRamp spec's: https://docs.liveramp.com/connect/en/formatting-identifiers.html
7881
Phone Number requires SHA1 and email uses sha256 */
@@ -94,7 +97,7 @@ function generateFile(payloads: s3Payload[] | sftpPayload[]) {
9497
// if a key exists in both identifier_data and unhashed_identifier_data
9598
// the value from identifier_data will be skipped, prioritizing the unhashed_identifier_data value.
9699
if (!unhashedKeys.has(key)) {
97-
const index = headerArray.indexOf(key)
100+
const index = sortedHeaderArray.indexOf(key)
98101
row[index] = enquoteIdentifier(String(payload.identifier_data[key]))
99102
}
100103
}
@@ -106,7 +109,7 @@ function generateFile(payloads: s3Payload[] | sftpPayload[]) {
106109
}
107110

108111
// Add headers to the beginning of the file contents
109-
rows = Buffer.concat([Buffer.from(headerArray.join(payloads[0].delimiter) + '\n'), rows])
112+
rows = Buffer.concat([Buffer.from(sortedHeaderArray.join(payloads[0].delimiter) + '\n'), rows])
110113

111114
const filename = payloads[0].filename
112115
return { filename, fileContents: rows }

0 commit comments

Comments (0)