diff --git a/packages/synapse-sdk/src/pdp/verifier.ts b/packages/synapse-sdk/src/pdp/verifier.ts index a17fadd..336c7f1 100644 --- a/packages/synapse-sdk/src/pdp/verifier.ts +++ b/packages/synapse-sdk/src/pdp/verifier.ts @@ -113,6 +113,44 @@ export class PDPVerifier { } } + /** + * Get active pieces for a data set with pagination + * @param dataSetId - The PDPVerifier data set ID + * @param options - Optional configuration object + * @param options.offset - The offset to start from (default: 0) + * @param options.limit - The maximum number of pieces to return (default: 100) + * @param options.signal - Optional AbortSignal to cancel the operation + * @returns Object containing pieces, piece IDs, and hasMore flag + */ + async getActivePieces( + dataSetId: number, + options?: { + offset?: number + limit?: number + signal?: AbortSignal + } + ): Promise<{ + pieces: Array<{ data: string }> + pieceIds: number[] + // NOTE: the contract also returns rawSizes, but we do not expose it in this return type. + hasMore: boolean + }> { + const offset = options?.offset ?? 0 + const limit = options?.limit ?? 
100 + const signal = options?.signal + + if (signal?.aborted) { + throw new Error('Operation aborted') + } + + const result = await this._contract.getActivePieces(dataSetId, offset, limit) + return { + pieces: result[0].map((piece: any) => ({ data: piece.data })), + pieceIds: result[1].map((id: bigint) => Number(id)), + hasMore: result[3], + } + } + /** * Get the PDPVerifier contract address for the current network */ diff --git a/packages/synapse-sdk/src/storage/context.ts b/packages/synapse-sdk/src/storage/context.ts index 15f5294..3175fea 100644 --- a/packages/synapse-sdk/src/storage/context.ts +++ b/packages/synapse-sdk/src/storage/context.ts @@ -22,9 +22,11 @@ * ``` */ -import type { ethers } from 'ethers' +import { ethers } from 'ethers' +import { CID } from 'multiformats/cid' import type { PaymentsService } from '../payments/index.ts' import { PDPAuthHelper, PDPServer } from '../pdp/index.ts' +import { PDPVerifier } from '../pdp/verifier.ts' import { asPieceCID } from '../piece/index.ts' import { SPRegistryService } from '../sp-registry/index.ts' import type { ProviderInfo } from '../sp-registry/types.ts' @@ -1353,14 +1355,73 @@ export class StorageContext { } /** - * Get the list of piece CIDs for this service service's data set by querying the PDP server. + * Get the list of piece CIDs for this service's data set. + * Gets data directly from PDPVerifier contract (source of truth) rather than Curio. + * @returns Array of piece CIDs as PieceCID objects + * @deprecated Use getPieces() generator for better memory efficiency with large data sets */ async getDataSetPieces(): Promise<PieceCID[]> { - const dataSetData = await this._pdpServer.getDataSet(this._dataSetId) - return dataSetData.pieces.map((piece) => piece.pieceCid) + const pieces: PieceCID[] = [] + for await (const [pieceCid] of this.getPieces()) { + pieces.push(pieceCid) + } + return pieces } + /** + * Get all active pieces for this data set as an async generator. 
+ * This provides lazy evaluation and better memory efficiency for large data sets. + * Gets data directly from PDPVerifier contract (source of truth) rather than Curio. + * @param options - Optional configuration object + * @param options.batchSize - The batch size for each pagination call (default: 100) + * @param options.signal - Optional AbortSignal to cancel the operation + * @yields Tuple of [PieceCID, pieceId] - the piece ID is needed for certain operations like deletion + */ + async *getPieces(options?: { batchSize?: number; signal?: AbortSignal }): AsyncGenerator<[PieceCID, number]> { + const pdpVerifierAddress = this._warmStorageService.getPDPVerifierAddress() + const pdpVerifier = new PDPVerifier(this._synapse.getProvider(), pdpVerifierAddress) + + const batchSize = options?.batchSize ?? 100 + const signal = options?.signal + let offset = 0 + let hasMore = true + + while (hasMore) { + if (signal?.aborted) { + throw createError('StorageContext', 'getPieces', 'Operation aborted') + } + + const result = await pdpVerifier.getActivePieces(this._dataSetId, { offset, limit: batchSize, signal }) + + // Yield pieces one by one for lazy evaluation + for (let i = 0; i < result.pieces.length; i++) { + if (signal?.aborted) { + throw createError('StorageContext', 'getPieces', 'Operation aborted') + } + + // Parse the piece data as a PieceCID + // The contract stores the full PieceCID multihash digest (including height and padding) + // The data comes as a hex string from ethers, we need to decode it as bytes then as a CID + const pieceDataHex = result.pieces[i].data + const pieceDataBytes = ethers.getBytes(pieceDataHex) + + const cid = CID.decode(pieceDataBytes) + const pieceCid = asPieceCID(cid) + if (!pieceCid) { + throw createError( + 'StorageContext', + 'getPieces', + `Invalid PieceCID returned from contract for piece ${result.pieceIds[i]}` + ) + } + + yield [pieceCid, result.pieceIds[i]] + } + + hasMore = result.hasMore + offset += batchSize + } + } private async 
_getPieceIdByCID(pieceCID: string | PieceCID): Promise { const parsedPieceCID = asPieceCID(pieceCID) if (parsedPieceCID == null) { diff --git a/packages/synapse-sdk/src/test/pdp-verifier.test.ts b/packages/synapse-sdk/src/test/pdp-verifier.test.ts index 3745867..60b4da0 100644 --- a/packages/synapse-sdk/src/test/pdp-verifier.test.ts +++ b/packages/synapse-sdk/src/test/pdp-verifier.test.ts @@ -167,6 +167,47 @@ describe('PDPVerifier', () => { }) }) + describe('getActivePieces', () => { + it('should handle AbortSignal', async () => { + const controller = new AbortController() + controller.abort() + + try { + await pdpVerifier.getActivePieces(123, { signal: controller.signal }) + assert.fail('Should have thrown an error') + } catch (error: any) { + assert.equal(error.message, 'Operation aborted') + } + }) + + it('should be callable with default options', async () => { + assert.isFunction(pdpVerifier.getActivePieces) + + mockProvider.call = async (transaction: any) => { + const data = transaction.data + if (data?.startsWith('0x39f51544') === true) { + // getActivePieces selector + return ethers.AbiCoder.defaultAbiCoder().encode( + ['tuple(bytes data)[]', 'uint256[]', 'uint256[]', 'bool'], + [ + [{ data: '0x1234567890123456789012345678901234567890123456789012345678901234' }], + [1, 2, 3], + [4, 5, 6], + false, + ] + ) + } + return `0x${'0'.repeat(64)}` + } + + const result = await pdpVerifier.getActivePieces(123) + assert.equal(result.pieces.length, 1) + assert.equal(result.pieceIds.length, 3) + assert.equal(result.hasMore, false) + assert.equal(result.pieces[0].data, '0x1234567890123456789012345678901234567890123456789012345678901234') + }) + }) + describe('getContractAddress', () => { it('should return the contract address', () => { const address = pdpVerifier.getContractAddress() diff --git a/packages/synapse-sdk/src/test/storage.test.ts b/packages/synapse-sdk/src/test/storage.test.ts index 0fa1cd9..f5cf53a 100644 --- a/packages/synapse-sdk/src/test/storage.test.ts 
+++ b/packages/synapse-sdk/src/test/storage.test.ts @@ -1,18 +1,34 @@ /* globals describe it beforeEach afterEach */ import { assert } from 'chai' import { ethers } from 'ethers' +import { CID } from 'multiformats/cid' +import { calculate as calculatePieceCID, getSizeFromPieceCID } from '../piece/index.ts' import { StorageContext } from '../storage/context.ts' import type { Synapse } from '../synapse.ts' import type { PieceCID, ProviderInfo, UploadResult } from '../types.ts' import { SIZE_CONSTANTS } from '../utils/constants.ts' import { createMockProviderInfo, createSimpleProvider, setupProviderRegistryMocks } from './test-utils.ts' +// Mock piece CIDs for testing +const mockPieceCids = [ + 'bafkzcibcd4bdomn3tgwgrh3g532zopskstnbrd2n3sxfqbze7rxt7vqn7veigmy', + 'bafkzcibeqcad6efnpwn62p5vvs5x3nh3j7xkzfgb3xtitcdm2hulmty3xx4tl3wace', +] + // Create a mock Ethereum provider that doesn't try to connect const mockEthProvider = { getTransaction: async (_hash: string) => null, getNetwork: async () => ({ chainId: BigInt(314159), name: 'calibration' }), - call: async (_tx: any) => { - // Mock contract calls - return empty data for registry calls + call: async (tx: any) => { + // Handle getActivePieces contract calls (function selector: 0x39f51544) + if (tx.data?.startsWith('0x39f51544')) { + // For now, return empty results - individual tests can override this behavior + return ethers.AbiCoder.defaultAbiCoder().encode( + ['tuple(bytes data)[]', 'uint256[]', 'uint256[]', 'bool'], + [[], [], [], false] + ) + } + // Mock contract calls - return empty data for other calls return '0x' }, } as any @@ -3789,6 +3805,7 @@ describe('StorageService', () => { it('should successfully fetch data set pieces', async () => { const mockWarmStorageService = { getServiceProviderRegistryAddress: () => '0x0000000000000000000000000000000000000001', + getPDPVerifierAddress: () => '0x0000000000000000000000000000000000000002', } as any const service = new StorageContext( mockSynapse, @@ -3820,15 
+3837,28 @@ describe('StorageService', () => { nextChallengeEpoch: 1500, } - // Mock the PDP server getDataSet method - const serviceAny = service as any - serviceAny._pdpServer.getDataSet = async (dataSetId: number): Promise => { - assert.equal(dataSetId, 123) - return mockDataSetData + // Override the mock provider to return the expected pieces for this test + const originalCall = mockEthProvider.call + mockEthProvider.call = async (tx: any) => { + if (tx.data?.startsWith('0x39f51544')) { + // Return mock pieces as encoded CID bytes + const piecesData = mockPieceCids.map((cidStr) => { + const cid = CID.parse(cidStr) + return { data: ethers.hexlify(cid.bytes) } + }) + return ethers.AbiCoder.defaultAbiCoder().encode( + ['tuple(bytes data)[]', 'uint256[]', 'uint256[]', 'bool'], + [piecesData, [101, 102], [128, 128], false] + ) + } + return originalCall(tx) } const result = await service.getDataSetPieces() + // Restore the original mock + mockEthProvider.call = originalCall + assert.isArray(result) assert.equal(result.length, 2) assert.equal(result[0].toString(), mockDataSetData.pieces[0].pieceCid) @@ -3838,6 +3868,7 @@ describe('StorageService', () => { it('should handle empty data set pieces', async () => { const mockWarmStorageService = { getServiceProviderRegistryAddress: () => '0x0000000000000000000000000000000000000001', + getPDPVerifierAddress: () => '0x1234567890123456789012345678901234567890', } as any const service = new StorageContext( mockSynapse, @@ -3850,18 +3881,6 @@ describe('StorageService', () => { {} ) - const mockDataSetData = { - id: 292, - pieces: [], - nextChallengeEpoch: 1500, - } - - // Mock the PDP server getDataSet method - const serviceAny = service as any - serviceAny._pdpServer.getDataSet = async (): Promise => { - return mockDataSetData - } - const result = await service.getDataSetPieces() assert.isArray(result) @@ -3871,6 +3890,7 @@ describe('StorageService', () => { it('should handle invalid CID in response', async () => { const 
mockWarmStorageService = { getServiceProviderRegistryAddress: () => '0x0000000000000000000000000000000000000001', + getPDPVerifierAddress: () => '0x1234567890123456789012345678901234567890', } as any const service = new StorageContext( mockSynapse, @@ -3883,34 +3903,38 @@ describe('StorageService', () => { {} ) - const mockDataSetData = { - id: 292, - pieces: [ - { - pieceId: 101, - pieceCid: 'invalid-cid-format', - subPieceCid: 'bafkzcibeqcad6efnpwn62p5vvs5x3nh3j7xkzfgb3xtitcdm2hulmty3xx4tl3wace', - subPieceOffset: 0, - }, - ], - nextChallengeEpoch: 1500, + // Override the mock provider to return invalid CID data + const originalCall = mockEthProvider.call + mockEthProvider.call = async (tx: any) => { + if (tx.data?.startsWith('0x39f51544')) { + // Return invalid CID data - this should cause an error in the new implementation + // The test below asserts that decoding these bytes throws + const invalidCidBytes = ethers.hexlify(ethers.toUtf8Bytes('invalid-cid-format')) + return ethers.AbiCoder.defaultAbiCoder().encode( + ['tuple(bytes data)[]', 'uint256[]', 'uint256[]', 'bool'], + [[{ data: invalidCidBytes }], [101], [128], false] + ) + } + return originalCall(tx) } - // Mock the PDP server getDataSet method - const serviceAny = service as any - serviceAny._pdpServer.getDataSet = async (): Promise => { - return mockDataSetData + // The new implementation should throw an error when trying to decode invalid CID data + try { + await service.getDataSetPieces() + assert.fail('Expected an error to be thrown for invalid CID data') + } catch (error: any) { + // The error occurs during CID.decode(), not during PieceCID validation + assert.include(error.message, 'Invalid CID version') + } finally { + // Restore the original mock + mockEthProvider.call = originalCall } - - const result = await service.getDataSetPieces() - assert.isArray(result) - assert.equal(result.length, 1) - assert.equal(result[0].toString(), 'invalid-cid-format') }) it('should handle PDP 
server errors', async () => { const mockWarmStorageService = { getServiceProviderRegistryAddress: () => '0x0000000000000000000000000000000000000001', + getPDPVerifierAddress: () => '0x1234567890123456789012345678901234567890', } as any const service = new StorageContext( mockSynapse, @@ -3923,17 +3947,23 @@ describe('StorageService', () => { {} ) - // Mock the PDP server getDataSet method to throw error - const serviceAny = service as any - serviceAny._pdpServer.getDataSet = async (): Promise => { - throw new Error('Data set not found: 999') + // Mock the contract call to throw an error + const originalCall = mockEthProvider.call + mockEthProvider.call = async (tx: any) => { + if (tx.data?.startsWith('0x39f51544')) { + throw new Error('Data set not found: 999') + } + return originalCall(tx) } try { await service.getDataSetPieces() - assert.fail('Should have thrown error for server error') + assert.fail('Should have thrown error for contract call error') } catch (error: any) { assert.include(error.message, 'Data set not found: 999') + } finally { + // Restore the original mock + mockEthProvider.call = originalCall } }) }) @@ -4361,4 +4391,350 @@ describe('StorageService', () => { assert.isUndefined(status.pieceId) }) }) + + describe('getPieces', () => { + it('should be available on StorageContext', () => { + // Basic test to ensure the method exists + assert.isFunction(StorageContext.prototype.getPieces) + }) + + it('should get all active pieces with pagination', async () => { + const pdpVerifierAddress = '0x5A23b7df87f59A291C26A2A1d684AD03Ce9B68DC' + const dataSetId = 123 + + // Use actual valid PieceCIDs from test data + const piece1Cid = calculatePieceCID(new Uint8Array(128).fill(1)) + const piece2Cid = calculatePieceCID(new Uint8Array(256).fill(2)) + const piece3Cid = calculatePieceCID(new Uint8Array(512).fill(3)) + + // Convert CIDs to bytes for contract encoding + const piece1Bytes = piece1Cid.bytes + const piece2Bytes = piece2Cid.bytes + const piece3Bytes = 
piece3Cid.bytes + + // Get actual raw sizes and leaf counts from the PieceCIDs + const piece1RawSize = getSizeFromPieceCID(piece1Cid) + const piece2RawSize = getSizeFromPieceCID(piece2Cid) + const piece3RawSize = getSizeFromPieceCID(piece3Cid) + + // Create a mock provider that returns paginated results with actual PieceCIDs + const testProvider = { + getNetwork: async () => ({ chainId: BigInt(314159), name: 'calibration' }), + call: async (transaction: any) => { + const data = transaction.data + // getActivePieces selector: 0x39f51544 + if (data?.startsWith('0x39f51544') === true) { + // Decode the parameters to determine which page is being requested + const decoded = ethers.AbiCoder.defaultAbiCoder().decode( + ['uint256', 'uint256', 'uint256'], + `0x${data.slice(10)}` + ) + const offset = Number(decoded[1]) + + // First page: return 2 pieces with hasMore=true + if (offset === 0) { + return ethers.AbiCoder.defaultAbiCoder().encode( + ['tuple(bytes data)[]', 'uint256[]', 'uint256[]', 'bool'], + [ + [{ data: piece1Bytes }, { data: piece2Bytes }], + [1, 2], + [piece1RawSize, piece2RawSize], + true, // hasMore + ] + ) + } + // Second page: return 1 piece with hasMore=false + if (offset === 2) { + return ethers.AbiCoder.defaultAbiCoder().encode( + ['tuple(bytes data)[]', 'uint256[]', 'uint256[]', 'bool'], + [ + [{ data: piece3Bytes }], + [3], + [piece3RawSize], + false, // No more pieces + ] + ) + } + } + return '0x' + }, + } as any + + // Create a mock warm storage service + const mockWarmStorage = { + getPDPVerifierAddress: () => pdpVerifierAddress, + } as any + + // Create a mock synapse + const testSynapse = { + getProvider: () => testProvider, + getSigner: () => new ethers.Wallet(ethers.hexlify(ethers.randomBytes(32))), + getWarmStorageAddress: () => '0x1234567890123456789012345678901234567890', + getChainId: () => BigInt(314159), + } as any + + // Create storage context + const context = new StorageContext( + testSynapse, + mockWarmStorage, + 
TEST_PROVIDERS.provider1, + dataSetId, + { withCDN: false }, + {} + ) + + // Test getPieces - should collect all pages + const allPieces = [] + for await (const piece of context.getPieces({ batchSize: 2 })) { + allPieces.push(piece) + } + + assert.equal(allPieces.length, 3, 'Should return all 3 pieces across pages') + assert.equal(allPieces[0][1], 1) // pieceId + assert.equal(allPieces[0][0].toString(), piece1Cid.toString()) // pieceCid + + assert.equal(allPieces[1][1], 2) + assert.equal(allPieces[1][0].toString(), piece2Cid.toString()) + + assert.equal(allPieces[2][1], 3) + assert.equal(allPieces[2][0].toString(), piece3Cid.toString()) + }) + + it('should handle empty results', async () => { + const pdpVerifierAddress = '0x5A23b7df87f59A291C26A2A1d684AD03Ce9B68DC' + const dataSetId = 123 + + // Create a mock provider that returns no pieces + const testProvider = { + getNetwork: async () => ({ chainId: BigInt(314159), name: 'calibration' }), + call: async (transaction: any) => { + const data = transaction.data + if (data?.startsWith('0x39f51544') === true) { + return ethers.AbiCoder.defaultAbiCoder().encode( + ['tuple(bytes data)[]', 'uint256[]', 'uint256[]', 'bool'], + [[], [], [], false] + ) + } + return '0x' + }, + } as any + + const mockWarmStorage = { + getPDPVerifierAddress: () => pdpVerifierAddress, + } as any + + const testSynapse = { + getProvider: () => testProvider, + getSigner: () => new ethers.Wallet(ethers.hexlify(ethers.randomBytes(32))), + getWarmStorageAddress: () => '0x1234567890123456789012345678901234567890', + getChainId: () => BigInt(314159), + } as any + + const context = new StorageContext( + testSynapse, + mockWarmStorage, + TEST_PROVIDERS.provider1, + dataSetId, + { withCDN: false }, + {} + ) + + const allPieces = [] + for await (const piece of context.getPieces()) { + allPieces.push(piece) + } + assert.equal(allPieces.length, 0, 'Should return empty array for data set with no pieces') + }) + + it('should handle AbortSignal in getPieces', 
async () => { + const pdpVerifierAddress = '0x5A23b7df87f59A291C26A2A1d684AD03Ce9B68DC' + const dataSetId = 123 + const controller = new AbortController() + + // Create a mock provider + const testProvider = { + getNetwork: async () => ({ chainId: BigInt(314159), name: 'calibration' }), + call: async (_transaction: any) => { + return ethers.AbiCoder.defaultAbiCoder().encode( + ['tuple(bytes data)[]', 'uint256[]', 'uint256[]', 'bool'], + [[], [], [], false] + ) + }, + } as any + + const mockWarmStorage = { + getPDPVerifierAddress: () => pdpVerifierAddress, + } as any + + const testSynapse = { + getProvider: () => testProvider, + getSigner: () => new ethers.Wallet(ethers.hexlify(ethers.randomBytes(32))), + getWarmStorageAddress: () => '0x1234567890123456789012345678901234567890', + getChainId: () => BigInt(314159), + } as any + + const context = new StorageContext( + testSynapse, + mockWarmStorage, + TEST_PROVIDERS.provider1, + dataSetId, + { withCDN: false }, + {} + ) + + // Abort before making the call + controller.abort() + + try { + for await (const _piece of context.getPieces({ signal: controller.signal })) { + // Should not reach here + } + assert.fail('Should have thrown an error') + } catch (error: any) { + assert.equal(error.message, 'StorageContext getPieces failed: Operation aborted') + } + }) + + it('should work with getPieces generator', async () => { + const pdpVerifierAddress = '0x5A23b7df87f59A291C26A2A1d684AD03Ce9B68DC' + const dataSetId = 123 + + // Use actual valid PieceCIDs from test data + const piece1Cid = calculatePieceCID(new Uint8Array(128).fill(1)) + const piece2Cid = calculatePieceCID(new Uint8Array(256).fill(2)) + const piece1Bytes = piece1Cid.bytes + const piece2Bytes = piece2Cid.bytes + + // Create a mock provider that returns paginated results + const testProvider = { + getNetwork: async () => ({ chainId: BigInt(314159), name: 'calibration' }), + call: async (transaction: any) => { + const data = transaction.data + if 
(data?.startsWith('0x39f51544') === true) { + const decoded = ethers.AbiCoder.defaultAbiCoder().decode( + ['uint256', 'uint256', 'uint256'], + `0x${data.slice(10)}` + ) + const offset = Number(decoded[1]) + + // First page + if (offset === 0) { + return ethers.AbiCoder.defaultAbiCoder().encode( + ['tuple(bytes data)[]', 'uint256[]', 'uint256[]', 'bool'], + [[{ data: piece1Bytes }], [1], [128], true] + ) + } + // Second page + if (offset === 1) { + return ethers.AbiCoder.defaultAbiCoder().encode( + ['tuple(bytes data)[]', 'uint256[]', 'uint256[]', 'bool'], + [[{ data: piece2Bytes }], [2], [256], false] + ) + } + } + return '0x' + }, + } as any + + const mockWarmStorage = { + getPDPVerifierAddress: () => pdpVerifierAddress, + } as any + + const testSynapse = { + getProvider: () => testProvider, + getSigner: () => new ethers.Wallet(ethers.hexlify(ethers.randomBytes(32))), + getWarmStorageAddress: () => '0x1234567890123456789012345678901234567890', + getChainId: () => BigInt(314159), + } as any + + const context = new StorageContext( + testSynapse, + mockWarmStorage, + TEST_PROVIDERS.provider1, + dataSetId, + { withCDN: false }, + {} + ) + + // Test the async generator + const pieces = [] + for await (const piece of context.getPieces({ batchSize: 1 })) { + pieces.push(piece) + } + + assert.equal(pieces.length, 2, 'Should yield 2 pieces') + assert.equal(pieces[0][1], 1) // pieceId + assert.equal(pieces[0][0].toString(), piece1Cid.toString()) // pieceCid + assert.equal(pieces[1][1], 2) + assert.equal(pieces[1][0].toString(), piece2Cid.toString()) + }) + + it('should handle AbortSignal in getPieces generator during iteration', async () => { + const pdpVerifierAddress = '0x5A23b7df87f59A291C26A2A1d684AD03Ce9B68DC' + const dataSetId = 123 + const controller = new AbortController() + + // Create a mock provider that returns multiple pages + let callCount = 0 + const testProvider = { + getNetwork: async () => ({ chainId: BigInt(314159), name: 'calibration' }), + call: async 
(transaction: any) => { + const data = transaction.data + if (data?.startsWith('0x39f51544') === true) { + callCount++ + // Abort after first page + if (callCount === 1) { + setTimeout(() => controller.abort(), 0) + const testPieceCid = calculatePieceCID(new Uint8Array(128).fill(1)) + return ethers.AbiCoder.defaultAbiCoder().encode( + ['tuple(bytes data)[]', 'uint256[]', 'uint256[]', 'bool'], + [ + [{ data: testPieceCid.bytes }], + [1], + [128], + true, // hasMore + ] + ) + } + } + return '0x' + }, + } as any + + const mockWarmStorage = { + getPDPVerifierAddress: () => pdpVerifierAddress, + } as any + + const testSynapse = { + getProvider: () => testProvider, + getSigner: () => new ethers.Wallet(ethers.hexlify(ethers.randomBytes(32))), + getWarmStorageAddress: () => '0x1234567890123456789012345678901234567890', + getChainId: () => BigInt(314159), + } as any + + const context = new StorageContext( + testSynapse, + mockWarmStorage, + TEST_PROVIDERS.provider1, + dataSetId, + { withCDN: false }, + {} + ) + + try { + const pieces = [] + for await (const piece of context.getPieces({ + batchSize: 1, + signal: controller.signal, + })) { + pieces.push(piece) + // Give the abort a chance to trigger + await new Promise((resolve) => setTimeout(resolve, 10)) + } + assert.fail('Should have thrown an error') + } catch (error: any) { + assert.equal(error.message, 'StorageContext getPieces failed: Operation aborted') + } + }) + }) }) diff --git a/utils/example-piece-details.js b/utils/example-piece-details.js new file mode 100755 index 0000000..e3ce0aa --- /dev/null +++ b/utils/example-piece-details.js @@ -0,0 +1,130 @@ +#!/usr/bin/env node + +/** + * Piece Details Example - Demonstrates how to get piece information directly from blockchain + * + * This example shows how to use the blockchain-based piece retrieval to get + * authoritative piece data directly from the PDPVerifier contract. + * + * The script will: + * 1. Find your data sets + * 2. 
Get piece information directly from PDPVerifier contract (source of truth) + * 3. Extract raw sizes from the PieceCID metadata + * 4. Display a summary of all pieces with their calculated sizes + * + * Usage: + * PRIVATE_KEY=0x... node example-piece-details.js + */ + +import { Synapse } from '@filoz/synapse-sdk' +import { getSizeFromPieceCID } from '@filoz/synapse-sdk/piece' + +const PRIVATE_KEY = process.env.PRIVATE_KEY +const RPC_URL = process.env.RPC_URL || 'https://api.calibration.node.glif.io/rpc/v1' +const WARM_STORAGE_ADDRESS = process.env.WARM_STORAGE_ADDRESS // Optional - will use default for network + +if (!PRIVATE_KEY) { + console.error('ERROR: PRIVATE_KEY environment variable is required') + console.error('Usage: PRIVATE_KEY=0x... node example-piece-details.js') + process.exit(1) +} + +async function main() { + console.log('=== Synapse SDK Piece Details Example ===\n') + + // Create Synapse instance + const synapseOptions = { + privateKey: PRIVATE_KEY, + rpcURL: RPC_URL, + } + + if (WARM_STORAGE_ADDRESS) { + synapseOptions.warmStorageAddress = WARM_STORAGE_ADDRESS + } + + const synapse = await Synapse.create(synapseOptions) + console.log('āœ… Synapse instance created') + + // Declare dataSetInfo in the outer scope + let dataSetInfo = null + + try { + // Find data sets with pieces + console.log('\nšŸ“Š Finding data sets...') + const dataSets = await synapse.storage.findDataSets() + console.log(`Found ${dataSets.length} data set(s)`) + + if (dataSets.length === 0) { + console.log('āŒ No data sets found. Please upload some data first using example-storage-simple.js') + return + } + + // Find a data set with pieces (currentPieceCount > 0) + const dataSetWithPieces = dataSets.find((ds) => ds.currentPieceCount > 0) + if (!dataSetWithPieces) { + console.log('āŒ No data sets with pieces found. 
Please upload some data first using example-storage-simple.js') + return + } + + // Map the data set properties to what we expect + dataSetInfo = { + dataSetId: dataSetWithPieces.pdpVerifierDataSetId, + providerId: dataSetWithPieces.providerId, + pieceCount: dataSetWithPieces.currentPieceCount, + clientDataSetId: dataSetWithPieces.clientDataSetId, + isLive: dataSetWithPieces.isLive, + withCDN: dataSetWithPieces.withCDN, + } + + console.log(`\nšŸ“Š Data Set Summary:`) + console.log(` PDP Verifier Data Set ID: ${dataSetInfo.dataSetId}`) + console.log(` Client Data Set ID: ${dataSetInfo.clientDataSetId}`) + console.log(` Provider ID: ${dataSetInfo.providerId}`) + console.log(` Piece Count: ${dataSetInfo.pieceCount}`) + console.log(` Is Live: ${dataSetInfo.isLive}`) + console.log(` With CDN: ${dataSetInfo.withCDN}`) + + // Get all pieces directly from blockchain + console.log('\n--- Getting Pieces from Blockchain (PDPVerifier) ---') + try { + const context = await synapse.storage.createContext({ + dataSetId: dataSetInfo.dataSetId, + providerId: dataSetInfo.providerId, + }) + + // Collect all pieces using the generator + const pieces = [] + for await (const [pieceCid, pieceId] of context.getPieces()) { + const rawSize = getSizeFromPieceCID(pieceCid) + pieces.push({ pieceCid, pieceId, rawSize }) + } + + console.log(`āœ… Retrieved ${pieces.length} pieces from blockchain:`) + + pieces.forEach((piece, index) => { + console.log(`\n Piece ${index + 1}:`) + console.log(` ID: ${piece.pieceId}`) + console.log(` CID: ${piece.pieceCid}`) + console.log(` Raw Size: ${piece.rawSize} bytes (${(piece.rawSize / 1024).toFixed(2)} KB)`) + }) + + // Calculate totals + const totalRawSize = pieces.reduce((sum, piece) => sum + piece.rawSize, 0) + + console.log(`\nšŸ“ˆ Data Set Summary:`) + console.log(` Total Pieces: ${pieces.length}`) + console.log(` Total Raw Size: ${totalRawSize} bytes (${(totalRawSize / 1024).toFixed(2)} KB)`) + console.log(` Average Piece Size: ${(totalRawSize / 
pieces.length).toFixed(2)} bytes`) + + console.log(`\nāœ… All data retrieved directly from PDPVerifier contract (blockchain)`) + console.log(` This is the authoritative source of truth, not Curio`) + } catch (error) { + console.error('āŒ Error getting pieces from blockchain:', error.message) + } + } catch (error) { + console.error('āŒ Error:', error.message) + console.error('Stack trace:', error.stack) + } +} + +main().catch(console.error)