Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
40 changes: 29 additions & 11 deletions src/api/routes/contract.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,8 @@ import { LimitParam, OffsetParam } from '../schemas/params';
import { InvalidRequestError, InvalidRequestErrorType, NotFoundError } from '../../errors';
import { ClarityAbi } from '@stacks/transactions';
import { SmartContractSchema } from '../schemas/entities/smart-contracts';
import { TransactionEventSchema } from '../schemas/entities/transaction-events';
import { SmartContractLogTransactionEvent } from '../schemas/entities/transaction-events';
import { ContractEventListResponseSchema } from '../schemas/responses/responses';

export const ContractRoutes: FastifyPluginAsync<
Record<never, never>,
Expand Down Expand Up @@ -114,33 +115,50 @@ export const ContractRoutes: FastifyPluginAsync<
querystring: Type.Object({
limit: LimitParam(ResourceType.Contract, 'Limit', 'max number of events to fetch'),
offset: OffsetParam(),
cursor: Type.Optional(
Type.String({
description: 'Cursor for pagination in the format: indexBlockHash:txIndex:eventIndex',
})
),
}),
response: {
200: Type.Object(
{
limit: Type.Integer(),
offset: Type.Integer(),
results: Type.Array(TransactionEventSchema),
},
{ description: 'List of events' }
),
200: ContractEventListResponseSchema,
},
},
},
async (req, reply) => {
const { contract_id } = req.params;
const limit = getPagingQueryLimit(ResourceType.Contract, req.query.limit);
const offset = parsePagingQueryInput(req.query.offset ?? 0);
const cursor = req.query.cursor;

// Validate cursor format if provided
if (cursor && !cursor.match(/^[0-9a-fA-F]{64}:\d+:\d+$/)) {
throw new InvalidRequestError(
'Invalid cursor format. Expected format: indexBlockHash:txIndex:eventIndex',
InvalidRequestErrorType.invalid_param
);
}
const eventsQuery = await fastify.db.getSmartContractEvents({
contractId: contract_id,
limit,
offset,
cursor,
});
if (!eventsQuery.found) {
throw new NotFoundError(`cannot find events for contract by ID}`);
}
const parsedEvents = eventsQuery.result.map(event => parseDbEvent(event));
await reply.send({ limit, offset, results: parsedEvents });
const parsedEvents = eventsQuery.result.map((event: any) => parseDbEvent(event));
const response = {
limit,
offset,
total: eventsQuery.total || 0,
results: parsedEvents as SmartContractLogTransactionEvent[],
next_cursor: eventsQuery.nextCursor || null,
prev_cursor: eventsQuery.prevCursor || null,
cursor: cursor || null,
};
await reply.send(response);
}
);

Expand Down
2 changes: 1 addition & 1 deletion src/api/schemas/entities/transaction-events.ts
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ const AbstractTransactionEventSchema = Type.Object(
);
type AbstractTransactionEvent = Static<typeof AbstractTransactionEventSchema>;

const SmartContractLogTransactionEventSchema = Type.Intersect(
export const SmartContractLogTransactionEventSchema = Type.Intersect(
[
AbstractTransactionEventSchema,
Type.Object({
Expand Down
11 changes: 9 additions & 2 deletions src/api/schemas/responses/responses.ts
Original file line number Diff line number Diff line change
@@ -1,13 +1,16 @@
import { Static, Type } from '@sinclair/typebox';
import { Nullable, OptionalNullable, PaginatedCursorResponse, PaginatedResponse } from '../util';
import { OptionalNullable, PaginatedCursorResponse, PaginatedResponse } from '../util';
import { MempoolStatsSchema } from '../entities/mempool-transactions';
import { MempoolTransactionSchema, TransactionSchema } from '../entities/transactions';
import { MicroblockSchema } from '../entities/microblock';
import {
AddressTransactionWithTransfersSchema,
InboundStxTransferSchema,
} from '../entities/addresses';
import { TransactionEventSchema } from '../entities/transaction-events';
import {
SmartContractLogTransactionEventSchema,
TransactionEventSchema,
} from '../entities/transaction-events';
import {
BurnchainRewardSchema,
BurnchainRewardSlotHolderSchema,
Expand Down Expand Up @@ -184,5 +187,9 @@ export type RunFaucetResponse = Static<typeof RunFaucetResponseSchema>;
export const BlockListV2ResponseSchema = PaginatedCursorResponse(NakamotoBlockSchema);
export type BlockListV2Response = Static<typeof BlockListV2ResponseSchema>;

export const ContractEventListResponseSchema = PaginatedCursorResponse(
SmartContractLogTransactionEventSchema
);

export const BlockSignerSignatureResponseSchema = PaginatedResponse(SignerSignatureSchema);
export type BlockSignerSignatureResponse = Static<typeof BlockSignerSignatureResponseSchema>;
8 changes: 8 additions & 0 deletions src/datastore/common.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import { FoundOrNot } from 'src/helpers';
import { Block } from '../api/schemas/entities/block';
import { SyntheticPoxEventName } from '../pox-helpers';
import { PgBytea, PgJsonb, PgNumeric } from '@hirosystems/api-toolkit';
Expand Down Expand Up @@ -552,6 +553,13 @@ export interface DbSmartContractEvent extends DbEventBase {
value: string;
}

export type DbCursorPaginatedFoundOrNot<T> = FoundOrNot<T> & {
nextCursor?: string | null;
prevCursor?: string | null;
currentCursor?: string | null;
total: number;
};

export interface DbStxLockEvent extends DbEventBase {
event_type: DbEventTypeId.StxLock;
locked_amount: bigint;
Expand Down
170 changes: 153 additions & 17 deletions src/datastore/pg-store.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ import {
bnsNameFromSubdomain,
ChainID,
REPO_DIR,
normalizeHashString,
} from '../helpers';
import { PgStoreEventEmitter } from './pg-store-event-emitter';
import {
Expand Down Expand Up @@ -42,6 +43,7 @@ import {
DbSearchResult,
DbSmartContract,
DbSmartContractEvent,
DbCursorPaginatedFoundOrNot,
DbStxBalance,
DbStxEvent,
DbStxLockEvent,
Expand Down Expand Up @@ -98,9 +100,32 @@ import * as path from 'path';
import { PgStoreV2 } from './pg-store-v2';
import { Fragment } from 'postgres';
import { parseBlockParam } from '../api/routes/v2/schemas';
import { sql } from 'node-pg-migrate/dist/operations/other';

export const MIGRATIONS_DIR = path.join(REPO_DIR, 'migrations');

// Cursor utilities for smart contract events
/**
 * Build an opaque pagination cursor for smart-contract events in the form
 * `<indexBlockHash>:<txIndex>:<eventIndex>`.
 */
function createEventCursor(indexBlockHash: string, txIndex: number, eventIndex: number): string {
  const segments = [indexBlockHash, String(txIndex), String(eventIndex)];
  return segments.join(':');
}

/**
 * Parse a pagination cursor of the form `<indexBlockHash>:<txIndex>:<eventIndex>`.
 *
 * @param cursor - Opaque cursor string produced by `createEventCursor`.
 * @returns The decoded cursor parts, or `null` when the cursor is malformed
 *   (wrong segment count, non-hex/wrong-length hash, or non-numeric indexes).
 */
function parseEventCursor(
  cursor: string
): { indexBlockHash: string; txIndex: number; eventIndex: number } | null {
  const parts = cursor.split(':');
  if (parts.length !== 3) return null;
  const [indexBlockHash, txPart, eventPart] = parts;

  // The hash must be exactly 64 hex chars (a 32-byte index_block_hash without 0x).
  if (!/^[0-9a-fA-F]{64}$/.test(indexBlockHash)) return null;
  // Require plain non-negative decimal integers. A bare parseInt() check is not
  // enough: parseInt('12abc') === 12 and parseInt(' +5') === 5, which would
  // silently accept malformed cursors instead of rejecting them.
  if (!/^\d+$/.test(txPart) || !/^\d+$/.test(eventPart)) return null;

  return {
    indexBlockHash,
    txIndex: Number.parseInt(txPart, 10),
    eventIndex: Number.parseInt(eventPart, 10),
  };
}

/**
* This is the main interface between the API and the Postgres database. It contains all methods that
* query the DB in search for blockchain data to be returned via endpoints or WebSockets/Socket.IO.
Expand Down Expand Up @@ -2096,35 +2121,138 @@ export class PgStore extends BasePgStore {
});
}

async getSmartContractEvents({
contractId,
limit,
offset,
}: {
async getSmartContractEvents(args: {
contractId: string;
limit: number;
offset: number;
}): Promise<FoundOrNot<DbSmartContractEvent[]>> {
offset?: number;
cursor?: string;
}): Promise<DbCursorPaginatedFoundOrNot<DbSmartContractEvent[]>> {
const contractId = args.contractId;
const limit = args.limit;
const offset = args.offset ?? 0;
const cursor = args.cursor ?? null;

// Parse cursor if provided
const parsedCursor = cursor ? parseEventCursor(cursor) : null;

// Get total count first
const totalCountResult = await this.sql<{ count: string }[]>`
SELECT COUNT(*) as count
FROM contract_logs
WHERE contract_identifier = ${contractId}
AND canonical = true
AND microblock_canonical = true
`;
const totalCount = parseInt(totalCountResult[0]?.count || '0');

// If cursor is provided, look up the block_height from index_block_hash
let cursorBlockHeight: number | null = null;
let cursorFilter = this.sql``;
if (parsedCursor) {
const normalizedHash = normalizeHashString(parsedCursor.indexBlockHash);
if (normalizedHash === false) {
throw new Error(`Invalid index_block_hash in cursor: ${parsedCursor.indexBlockHash}`);
}
const blockHeightResult = await this.sql<{ block_height: number }[]>`
SELECT block_height
FROM blocks
WHERE index_block_hash = ${normalizedHash} AND canonical = true
LIMIT 1
`;
if (blockHeightResult.length === 0) {
// Cursor references a block that doesn't exist or was re-orged
throw new Error(
`Block not found for cursor index_block_hash: ${parsedCursor.indexBlockHash}`
);
}
cursorBlockHeight = blockHeightResult[0].block_height;

cursorFilter = this
.sql`AND (block_height, tx_index, event_index) < (${cursorBlockHeight}, ${parsedCursor.txIndex}, ${parsedCursor.eventIndex})`;
}

const logResults = await this.sql<
{
event_index: number;
tx_id: string;
tx_index: number;
block_height: number;
index_block_hash: string;
contract_identifier: string;
topic: string;
value: string;
}[]
>`
SELECT
event_index, tx_id, tx_index, block_height, contract_identifier, topic, value
event_index, tx_id, tx_index, block_height, encode(index_block_hash, 'hex') as index_block_hash,
contract_identifier, topic, value
FROM contract_logs
WHERE canonical = true AND microblock_canonical = true AND contract_identifier = ${contractId}
WHERE canonical = true
AND microblock_canonical = true
AND contract_identifier = ${contractId}
${cursorFilter}
ORDER BY block_height DESC, microblock_sequence DESC, tx_index DESC, event_index DESC
LIMIT ${limit}
OFFSET ${offset}
LIMIT ${limit + 1}
${cursor ? this.sql`` : this.sql`OFFSET ${offset}`}
`;
const result = logResults.map(result => {

// Check if there are more results (for prev cursor)
const hasNextPage = logResults.length > limit;
const results = hasNextPage ? logResults.slice(0, limit) : logResults;

// Generate prev cursor from the last result
const lastResult = results[results.length - 1];
const prevCursor =
hasNextPage && results.length > 0
? createEventCursor(
lastResult.index_block_hash,
lastResult.tx_index,
lastResult.event_index
)
: null;

// Generate current cursor from the first result
const firstResult = results[0];
const currentCursor = firstResult
? createEventCursor(
firstResult.index_block_hash,
firstResult.tx_index,
firstResult.event_index
)
: null;

// Generate next cursor from the first result of last page
let nextCursor: string | null = null;
if (firstResult) {
const prevEvents = await this.sql<
{
event_index: number;
tx_index: number;
index_block_hash: string;
}[]
>`
SELECT event_index, tx_index, encode(index_block_hash, 'hex') as index_block_hash
FROM contract_logs
WHERE canonical = true
AND microblock_canonical = true
AND contract_identifier = ${contractId}
AND (block_height, tx_index, event_index) >
(${firstResult.block_height}, ${firstResult.tx_index}, ${firstResult.event_index})
ORDER BY block_height ASC, microblock_sequence ASC, tx_index ASC, event_index ASC
OFFSET ${limit - 1}
LIMIT 1
`;
if (prevEvents.length > 0) {
const event = prevEvents[0];
nextCursor = createEventCursor(event.index_block_hash, event.tx_index, event.event_index);
}
}

console.log({ nextCursor, prevCursor, currentCursor });
console.log({ firstResult, lastResult });

// Map to DbSmartContractEvent format
const mappedResults = results.map(result => {
const event: DbSmartContractEvent = {
event_index: result.event_index,
tx_id: result.tx_id,
Expand All @@ -2138,7 +2266,15 @@ export class PgStore extends BasePgStore {
};
return event;
});
return { found: true, result };

return {
found: true,
result: mappedResults,
nextCursor,
prevCursor,
currentCursor,
total: totalCount,
};
}

async getSmartContractByTrait(args: {
Expand Down Expand Up @@ -3334,15 +3470,15 @@ export class PgStore extends BasePgStore {
{ address: string; balance: string; count: number; total_supply: string }[]
>`
WITH totals AS (
SELECT
SELECT
SUM(balance) AS total,
COUNT(*)::int AS total_count
FROM ft_balances
WHERE token = ${args.token}
)
SELECT
fb.address,
fb.balance,
SELECT
fb.address,
fb.balance,
ts.total AS total_supply,
ts.total_count AS count
FROM ft_balances fb
Expand Down
Loading