
Commit f09c2e2

feat: add cursor pagination to address transactions endpoint (#2422)
* feat: cursor pagination
* fix: tests
1 parent dac3ef5 commit f09c2e2
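
For orientation, a minimal sketch of how a client might call the updated endpoint once this commit is deployed. The base URL and address below are made-up placeholders; only the cursor/limit query parameters and the cursor fields in the response are taken from the diff that follows.

async function firstPage(): Promise<void> {
  // baseUrl and the address are placeholders, not values from this commit.
  const baseUrl = 'https://api.example.com';
  const address = 'SP000000000000000000002Q6VF78';

  // Newest transactions first; no cursor on the initial request.
  const res = await fetch(`${baseUrl}/extended/v2/addresses/${address}/transactions?limit=2`);
  const page = await res.json();
  // page.cursor      -> cursor identifying the page just returned
  // page.next_cursor -> newer page (null when already on the newest page)
  // page.prev_cursor -> older page (null when nothing older exists)
  // Subsequent requests pass one of these back via the cursor query parameter.
  console.log(page.results.length);
}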

File tree

3 files changed: +154 -9 lines changed

src/api/routes/v2/addresses.ts

Lines changed: 14 additions & 3 deletions
@@ -19,7 +19,7 @@ import {
   ExcludeFunctionArgsParamSchema,
 } from '../../schemas/params';
 import { getPagingQueryLimit, ResourceType } from '../../pagination';
-import { PaginatedResponse } from '../../schemas/util';
+import { PaginatedCursorResponse, PaginatedResponse } from '../../schemas/util';
 import {
   AddressTransaction,
   AddressTransactionEvent,
@@ -54,7 +54,7 @@ export const AddressRoutesV2: FastifyPluginAsync<
           exclude_function_args: ExcludeFunctionArgsParamSchema,
         }),
         response: {
-          200: PaginatedResponse(AddressTransactionSchema),
+          200: PaginatedCursorResponse(AddressTransactionSchema),
         },
       },
     },
@@ -64,7 +64,15 @@ export const AddressRoutesV2: FastifyPluginAsync<
       const excludeFunctionArgs = req.query.exclude_function_args ?? false;

       try {
-        const { limit, offset, results, total } = await fastify.db.v2.getAddressTransactions({
+        const {
+          limit,
+          offset,
+          results,
+          total,
+          next_cursor,
+          prev_cursor,
+          current_cursor: cursor,
+        } = await fastify.db.v2.getAddressTransactions({
           ...params,
           ...query,
         });
@@ -75,6 +83,9 @@ export const AddressRoutesV2: FastifyPluginAsync<
           limit,
           offset,
           total,
+          next_cursor,
+          prev_cursor,
+          cursor,
           results: transfers,
         });
       } catch (error) {
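
Read from the handler above, the PaginatedCursorResponse envelope keeps the existing limit/offset/total fields and adds three cursor fields. The interface below is only a sketch of that shape, not the schema defined in ../../schemas/util.

// Sketch of the envelope implied by the reply object above; the authoritative
// definition is PaginatedCursorResponse(AddressTransactionSchema) in schemas/util.
interface CursorPaginatedResponse<T> {
  limit: number;
  offset: number;
  total: number;
  cursor: string | null; // cursor for the page being returned
  next_cursor: string | null; // newer page, null when on the newest page
  prev_cursor: string | null; // older page, null when nothing older exists
  results: T[];
}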

src/datastore/pg-store-v2.ts

Lines changed: 95 additions & 6 deletions
@@ -528,11 +528,49 @@ export class PgStoreV2 extends BasePgStoreModule {
   }

   async getAddressTransactions(
-    args: AddressParams & TransactionPaginationQueryParams
-  ): Promise<DbPaginatedResult<DbTxWithAddressTransfers>> {
+    args: AddressParams & TransactionPaginationQueryParams & { cursor?: string }
+  ): Promise<DbCursorPaginatedResult<DbTxWithAddressTransfers>> {
     return await this.sqlTransaction(async sql => {
       const limit = args.limit ?? TransactionLimitParamSchema.default;
       const offset = args.offset ?? 0;
+
+      // Parse cursor if provided (format: "indexBlockHash:microblockSequence:txIndex")
+      let cursorFilter = sql``;
+      if (args.cursor) {
+        const parts = args.cursor.split(':');
+        if (parts.length !== 3) {
+          throw new InvalidRequestError(
+            'Invalid cursor format',
+            InvalidRequestErrorType.invalid_param
+          );
+        }
+        const [indexBlockHash, microblockSequenceStr, txIndexStr] = parts;
+        const microblockSequence = parseInt(microblockSequenceStr, 10);
+        const txIndex = parseInt(txIndexStr, 10);
+        if (!indexBlockHash || isNaN(microblockSequence) || isNaN(txIndex)) {
+          throw new InvalidRequestError(
+            'Invalid cursor format',
+            InvalidRequestErrorType.invalid_param
+          );
+        }
+        // Look up block_height from index_block_hash for the row-value comparison
+        const blockHeightQuery = await sql<{ block_height: number }[]>`
+          SELECT block_height FROM blocks WHERE index_block_hash = ${indexBlockHash} LIMIT 1
+        `;
+        if (blockHeightQuery.length === 0) {
+          throw new InvalidRequestError(
+            'Invalid cursor: block not found',
+            InvalidRequestErrorType.invalid_param
+          );
+        }
+        const blockHeight = blockHeightQuery[0].block_height;
+
+        cursorFilter = sql`
+          AND (p.block_height, p.microblock_sequence, p.tx_index)
+            <= (${blockHeight}, ${microblockSequence}, ${txIndex})
+        `;
+      }
+
       const resultQuery = await sql<(AddressTransfersTxQueryResult & { count: number })[]>`
         SELECT
           ${sql(prefixedCols(TX_COLUMNS, 't'))},
@@ -555,17 +593,68 @@ export class PgStoreV2 extends BasePgStoreModule {
         WHERE p.principal = ${args.address}
           AND p.canonical = TRUE
           AND p.microblock_canonical = TRUE
+          ${cursorFilter}
         ORDER BY p.block_height DESC, p.microblock_sequence DESC, p.tx_index DESC
-        LIMIT ${limit}
-        OFFSET ${offset}
+        LIMIT ${limit + 1}
       `;
-      const total = resultQuery.length > 0 ? resultQuery[0].count : 0;
-      const parsed = resultQuery.map(r => parseAccountTransferSummaryTxQueryResult(r));
+
+      const hasNextPage = resultQuery.count > limit;
+      const results = hasNextPage ? resultQuery.slice(0, limit) : resultQuery;
+
+      const total = resultQuery.count > 0 ? resultQuery[0].count : 0;
+      const parsed = results.map(r => parseAccountTransferSummaryTxQueryResult(r));
+
+      // Generate prev cursor from the last result
+      const lastResult = resultQuery[resultQuery.length - 1];
+      const prevCursor =
+        hasNextPage && lastResult
+          ? `${lastResult.index_block_hash}:${lastResult.microblock_sequence}:${lastResult.tx_index}`
+          : null;
+
+      // Generate current cursor from first result
+      const firstResult = results[0];
+      const currentCursor = firstResult
+        ? `${firstResult.index_block_hash}:${firstResult.microblock_sequence}:${firstResult.tx_index}`
+        : null;
+
+      // Generate next cursor by looking for the first item of the previous page
+      let nextCursor: string | null = null;
+      if (firstResult) {
+        // Find the item that would start the previous page
+        // We look for items "before" our current first result (greater in DESC order)
+        // and skip (limit - 1) to find the start of that page
+        const prevQuery = await sql<
+          { index_block_hash: string; microblock_sequence: number; tx_index: number }[]
+        >`
+          SELECT p.index_block_hash, p.microblock_sequence, p.tx_index
+          FROM principal_txs AS p
+          WHERE p.principal = ${args.address}
+            AND p.canonical = TRUE
+            AND p.microblock_canonical = TRUE
+            AND (p.block_height, p.microblock_sequence, p.tx_index)
+              > (
+                ${firstResult.block_height},
+                ${firstResult.microblock_sequence},
+                ${firstResult.tx_index}
+              )
+          ORDER BY p.block_height ASC, p.microblock_sequence ASC, p.tx_index ASC
+          OFFSET ${limit - 1}
+          LIMIT 1
+        `;
+        if (prevQuery.length > 0) {
+          const prev = prevQuery[0];
+          nextCursor = `${prev.index_block_hash}:${prev.microblock_sequence}:${prev.tx_index}`;
+        }
+      }
+
       return {
         total,
         limit,
         offset,
         results: parsed,
+        next_cursor: nextCursor,
+        prev_cursor: prevCursor,
+        current_cursor: currentCursor,
       };
     });
   }
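
The cursor string has the shape indexBlockHash:microblockSequence:txIndex and is parsed inline above. As a hypothetical refactor that is not part of this commit, the same format could be factored into a pair of helpers, which also makes explicit what a valid cursor looks like to callers.

// Hypothetical helpers mirroring the inline cursor handling above; not in the commit.
interface AddressTxCursor {
  indexBlockHash: string;
  microblockSequence: number;
  txIndex: number;
}

function encodeAddressTxCursor(c: AddressTxCursor): string {
  return `${c.indexBlockHash}:${c.microblockSequence}:${c.txIndex}`;
}

function decodeAddressTxCursor(cursor: string): AddressTxCursor | null {
  const parts = cursor.split(':');
  if (parts.length !== 3) return null;
  const [indexBlockHash, microblockSequenceStr, txIndexStr] = parts;
  const microblockSequence = parseInt(microblockSequenceStr, 10);
  const txIndex = parseInt(txIndexStr, 10);
  if (!indexBlockHash || isNaN(microblockSequence) || isNaN(txIndex)) return null;
  return { indexBlockHash, microblockSequence, txIndex };
}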

tests/api/address.test.ts

Lines changed: 45 additions & 0 deletions
@@ -986,6 +986,51 @@ describe('address tests', () => {
       ],
     };
     expect(JSON.parse(fetch2.text)).toEqual(expected2);
+
+    // Cursor fetch
+    const cursorFetch1 = await supertest(api.server).get(
+      `/extended/v2/addresses/${testAddr2}/transactions?limit=2`
+    );
+    const cursorFetch1Json = JSON.parse(cursorFetch1.text);
+    expect(cursorFetch1Json.cursor).toBeDefined();
+    expect(cursorFetch1Json.limit).toBe(2);
+    expect(cursorFetch1Json.offset).toBe(0);
+    expect(cursorFetch1Json.total).toBe(7);
+    expect(cursorFetch1Json.results).toHaveLength(2);
+    expect(cursorFetch1Json.results[0].tx).toEqual(v2Fetch1Json.results[0].tx);
+    expect(cursorFetch1Json.results[1].tx).toEqual(v2Fetch1Json.results[1].tx);
+    expect(cursorFetch1Json.next_cursor).toBeNull();
+    expect(cursorFetch1Json.prev_cursor).toBeDefined();
+
+    // First cursor should be equivalent to the original fetch
+    const cursorFetch2 = await supertest(api.server).get(
+      `/extended/v2/addresses/${testAddr2}/transactions?cursor=${cursorFetch1Json.cursor}&limit=2`
+    );
+    const cursorFetch2Json = JSON.parse(cursorFetch2.text);
+    expect(cursorFetch2Json.cursor).toBe(cursorFetch1Json.cursor);
+    expect(cursorFetch2Json.limit).toBe(2);
+    expect(cursorFetch2Json.offset).toBe(0);
+    expect(cursorFetch2Json.total).toBe(7);
+    expect(cursorFetch2Json.results).toHaveLength(2);
+    expect(cursorFetch2Json.results[0].tx).toEqual(v2Fetch1Json.results[0].tx);
+    expect(cursorFetch2Json.results[1].tx).toEqual(v2Fetch1Json.results[1].tx);
+    expect(cursorFetch2Json.next_cursor).toBeNull();
+    expect(cursorFetch2Json.prev_cursor).not.toBeNull();
+
+    // Go back one page
+    const cursorFetch3 = await supertest(api.server).get(
+      `/extended/v2/addresses/${testAddr2}/transactions?cursor=${cursorFetch2Json.prev_cursor}&limit=2`
+    );
+    const cursorFetch3Json = JSON.parse(cursorFetch3.text);
+    expect(cursorFetch3Json.cursor).toBe(cursorFetch2Json.prev_cursor);
+    expect(cursorFetch3Json.limit).toBe(2);
+    expect(cursorFetch3Json.offset).toBe(0);
+    expect(cursorFetch3Json.total).toBe(7);
+    expect(cursorFetch3Json.results).toHaveLength(2);
+    expect(cursorFetch3Json.results[0].tx).toEqual(v2Fetch1Json.results[2].tx);
+    expect(cursorFetch3Json.results[1].tx).toEqual(v2Fetch1Json.results[3].tx);
+    expect(cursorFetch3Json.next_cursor).toBe(cursorFetch2Json.cursor);
+    expect(cursorFetch3Json.prev_cursor).not.toBeNull();
   });

   test('address nonce', async () => {
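
The test above pages back a single step by hand. As a usage note for API consumers, a client can keep following prev_cursor until it comes back null to walk from the newest page to the oldest. A minimal sketch, assuming a placeholder base URL and leaving the per-item type untyped:

// Sketch: walk from the newest page toward older pages by following prev_cursor.
// baseUrl and address are placeholders; the field names match the handler above.
async function fetchAllAddressTransactions(baseUrl: string, address: string) {
  const all: unknown[] = [];
  let cursor: string | null = null;
  do {
    const qs = cursor
      ? `?limit=50&cursor=${encodeURIComponent(cursor)}`
      : '?limit=50';
    const res = await fetch(`${baseUrl}/extended/v2/addresses/${address}/transactions${qs}`);
    const page = await res.json();
    all.push(...page.results);
    cursor = page.prev_cursor; // older page; null once the oldest page is reached
  } while (cursor !== null);
  return all;
}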
