diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ce3c9cef3..815304698 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -17,7 +17,7 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-node@v4 with: - node-version: "v22.15.0" + node-version: 'v22.15.0' - name: Cache node_modules uses: actions/cache@v3 env: @@ -42,7 +42,7 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest] - node: ["18.20.4", "v20.19.0", "v22.15.0"] + node: ['18.20.4', 'v20.19.0', 'v22.15.0'] steps: - uses: actions/checkout@v4 @@ -66,7 +66,7 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-node@v4 with: - node-version: "v22.15.0" + node-version: 'v22.15.0' - name: Cache node_modules uses: actions/cache@v3 env: @@ -87,6 +87,8 @@ jobs: RPCS: '{ "1": {"rpc": "https://rpc.eth.gateway.fm", "chainId": 1, "network": "mainet", "chunkSize": 100}, "137": {"rpc": "https://polygon.meowrpc.com", "chainId": 137, "network": "polygon", "chunkSize": 100 }, "80001": {"rpc": "https://rpc-mumbai.maticvigil.com","chainId": 80001, "network": "polygon-mumbai", "chunkSize": 100 } }' DB_URL: 'http://localhost:8108/?apiKey=xyz' DB_TYPE: 'typesense' + DB_USERNAME: 'elastic' + DB_PASSWORD: 'changeme' FEE_TOKENS: '{ "1": "0x967da4048cD07aB37855c090aAF366e4ce1b9F48", "137": "0x282d8efCe846A88B159800bd4130ad77443Fa1A1", "80001": "0xd8992Ed72C445c35Cb4A2be468568Ed1079357c8", "56": "0xDCe07662CA8EbC241316a15B611c89711414Dd1a" }' FEE_AMOUNT: '{ "amount": 1, "unit": "MB" }' - uses: actions/upload-artifact@v4 @@ -100,7 +102,7 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-node@v4 with: - node-version: "v22.15.0" + node-version: 'v22.15.0' - name: Cache node_modules uses: actions/cache@v3 env: @@ -156,6 +158,8 @@ jobs: RPCS: '{ "8996": {"rpc": "http://127.0.0.1:8545", "chainId": 8996, "network": "development", "chunkSize": 100}}' DB_URL: 'http://localhost:9200' DB_TYPE: 'elasticsearch' + DB_USERNAME: 'elastic' + DB_PASSWORD: 'changeme' FEE_TOKENS: '{ "1": 
"0x967da4048cD07aB37855c090aAF366e4ce1b9F48", "137": "0x282d8efCe846A88B159800bd4130ad77443Fa1A1", "80001": "0xd8992Ed72C445c35Cb4A2be468568Ed1079357c8", "56": "0xDCe07662CA8EbC241316a15B611c89711414Dd1a" }' FEE_AMOUNT: '{ "amount": 1, "unit": "MB" }' ASSET_PURGATORY_URL: 'https://raw.githubusercontent.com/oceanprotocol/list-purgatory/main/list-assets.json' @@ -178,7 +182,7 @@ jobs: - name: Set up Node.js uses: actions/setup-node@v4 with: - node-version: "v22.15.0" + node-version: 'v22.15.0' - name: Cache node_modules uses: actions/cache@v3 @@ -258,6 +262,8 @@ jobs: P2P_ENABLE_AUTONAT: 'false' ALLOWED_ADMINS: '["0xe2DD09d719Da89e5a3D0F2549c7E24566e947260"]' DB_TYPE: 'elasticsearch' + DB_USERNAME: 'elastic' + DB_PASSWORD: 'changeme' MAX_REQ_PER_MINUTE: 320 MAX_CONNECTIONS_PER_MINUTE: 320 DOCKER_COMPUTE_ENVIRONMENTS: '[{"socketPath":"/var/run/docker.sock","resources":[{"id":"disk","total":10}],"storageExpiry":604800,"maxJobDuration":3600,"minJobDuration": 60,"fees":{"8996":[{"prices":[{"id":"cpu","price":1}]}]},"free":{"maxJobDuration":60,"minJobDuration": 10,"maxJobs":3,"resources":[{"id":"cpu","max":1},{"id":"ram","max":1},{"id":"disk","max":1}]}}]' @@ -309,7 +315,7 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-node@v4 with: - node-version: "v22.15.0" + node-version: 'v22.15.0' - name: Cache node_modules uses: actions/cache@v3 env: diff --git a/src/components/core/handler/ddoHandler.ts b/src/components/core/handler/ddoHandler.ts index 6573514e1..71fbf0bcb 100644 --- a/src/components/core/handler/ddoHandler.ts +++ b/src/components/core/handler/ddoHandler.ts @@ -798,11 +798,6 @@ export class FindDdoHandler extends CommandHandler { } } -export async function skipValidation(): Promise<boolean> { - const configuration = await getConfiguration() - return configuration.validateUnsignedDDO - } - -export class ValidateDDOHandler extends CommandHandler { validate(command: ValidateDDOCommand): ValidateParams { let validation = validateCommandParameters(command, ['ddo']) @@ 
-815,8 +810,15 @@ export class ValidateDDOHandler extends CommandHandler { async handle(task: ValidateDDOCommand): Promise { const validationResponse = await this.verifyParamsAndRateLimits(task) - const shouldSkipValidation = await skipValidation() - if (!shouldSkipValidation) { + if (this.shouldDenyTaskHandling(validationResponse)) { + return validationResponse + } + let shouldSign = false + const configuration = await getConfiguration() + if (configuration.validateUnsignedDDO) { + shouldSign = true + } + if (task.authorization || task.signature || task.nonce || task.publisherAddress) { const validationResponse = await this.validateTokenOrSignature( task.authorization, task.publisherAddress, @@ -827,10 +829,7 @@ export class ValidateDDOHandler extends CommandHandler { if (validationResponse.status.httpStatus !== 200) { return validationResponse } - } - - if (this.shouldDenyTaskHandling(validationResponse)) { - return validationResponse + shouldSign = true } try { @@ -849,9 +848,12 @@ export class ValidateDDOHandler extends CommandHandler { status: { httpStatus: 400, error: `Validation error: ${validation[1]}` } } } - const signature = await getValidationSignature(JSON.stringify(task.ddo)) return { - stream: Readable.from(JSON.stringify(signature)), + stream: shouldSign + ? 
Readable.from( + JSON.stringify(await getValidationSignature(JSON.stringify(task.ddo))) + ) + : null, status: { httpStatus: 200 } } } catch (error) { diff --git a/src/test/.env.test b/src/test/.env.test index 4a2079fcc..0d95e5057 100644 --- a/src/test/.env.test +++ b/src/test/.env.test @@ -4,6 +4,8 @@ PRIVATE_KEY=0xc594c6e5def4bab63ac29eed19a134c130388f74f019bc74b8f4389df2837a58 RPCS='{ "8996": {"rpc": "http://127.0.0.1:8545", "chainId": 8996, "network": "development", "chunkSize": 100}}' INDEXER_NETWORKS='[8996]' DB_URL=http://localhost:9200 +DB_USERNAME=elastic +DB_PASSWORD=changeme IPFS_GATEWAY=https://ipfs.io/ ARWEAVE_GATEWAY=https://arweave.net/ NODE1_PRIVATE_KEY=0xcb345bd2b11264d523ddaf383094e2675c420a17511c3102a53817f13474a7ff diff --git a/src/test/integration/database.test.ts b/src/test/integration/database.test.ts index 40814ec02..bf5aaa458 100644 --- a/src/test/integration/database.test.ts +++ b/src/test/integration/database.test.ts @@ -15,7 +15,9 @@ const typesenseConfig: OceanNodeDBConfig = { const elasticConfig: OceanNodeDBConfig = { url: 'http://localhost:9200', - dbType: DB_TYPES.ELASTIC_SEARCH + dbType: DB_TYPES.ELASTIC_SEARCH, + username: 'elastic', + password: 'changeme' } const emptyDBConfig: OceanNodeDBConfig = { diff --git a/src/test/integration/elasticsearch.test.ts b/src/test/integration/elasticsearch.test.ts index baba2308c..171b6b727 100644 --- a/src/test/integration/elasticsearch.test.ts +++ b/src/test/integration/elasticsearch.test.ts @@ -13,7 +13,9 @@ import { SQLLiteNonceDatabase } from '../../components/database/SQLLiteNonceData const dbConfig = { url: 'http://localhost:9200', - dbType: DB_TYPES.ELASTIC_SEARCH + dbType: DB_TYPES.ELASTIC_SEARCH, + username: 'elastic', + password: 'changeme' } const elasticsearch: Database = await Database.init(dbConfig) diff --git a/src/test/unit/indexer/validation.test.ts b/src/test/unit/indexer/validation.test.ts index 9bcc1cbc3..205614b18 100644 --- a/src/test/unit/indexer/validation.test.ts +++ 
b/src/test/unit/indexer/validation.test.ts @@ -17,7 +17,9 @@ import { PROTOCOL_COMMANDS } from '../../../utils/constants.js' import { RPCS } from '../../../@types/blockchain.js' import { Database } from '../../../components/database/index.js' import { OceanNodeConfig } from '../../../@types/OceanNode.js' -import sinon, { SinonSandbox } from 'sinon' +// import sinon, { SinonSandbox } from 'sinon' +import { ethers } from 'ethers' +import { Readable } from 'stream' describe('Schema validation tests', () => { const mockSupportedNetworks: RPCS = getMockSupportedNetworks() @@ -26,7 +28,7 @@ describe('Schema validation tests', () => { let mockDatabase: Database let config: OceanNodeConfig let oceanNode: OceanNode - let sandbox: SinonSandbox + // let sandbox: SinonSandbox // For token validation, please check integration test cases before(async () => { @@ -48,7 +50,7 @@ describe('Schema validation tests', () => { ) envOverrides = await setupEnvironment(TEST_ENV_CONFIG_FILE, envOverrides) config = await getConfiguration(true) - sandbox = sinon.createSandbox() + /* sandbox = sinon.createSandbox() sandbox.stub(Database, 'init').resolves({ nonce: {}, c2d: {}, @@ -60,6 +62,7 @@ describe('Schema validation tests', () => { order: {}, ddoState: {} } as any) + */ mockDatabase = await Database.init(config.dbConfig) oceanNode = await OceanNode.getInstance( config, @@ -166,4 +169,35 @@ describe('Schema validation tests', () => { expect(result.status.httpStatus).to.equal(401) }) + + it('should have node signature for valid user', async () => { + const handler = new ValidateDDOHandler(oceanNode) + const ddoInstance = DDOManager.getDDOClass(DDOExample) + const ddo: DDO = { + ...(ddoInstance.getDDOData() as DDO) + } + const wallet = new ethers.Wallet( + '0xef4b441145c1d0f3b4bc6d61d29f5c6e502359481152f869247c7a4244d45209' + ) + const nonce = Date.now().toString() + const message = String((await wallet.getAddress()) + nonce) + const consumerMessage = ethers.solidityPackedKeccak256( + 
['bytes'], + [ethers.hexlify(ethers.toUtf8Bytes(message))] + ) + const messageHashBytes = ethers.toBeArray(consumerMessage) + const signature = await wallet.signMessage(messageHashBytes) + const task = { + ddo, + publisherAddress: await wallet.getAddress(), + nonce, + signature, + command: PROTOCOL_COMMANDS.VALIDATE_DDO + } + + const result = await handler.handle(task) + + expect(result.status.httpStatus).to.equal(200) + expect(result.stream).to.be.instanceOf(Readable) + }) }) diff --git a/src/utils/database.ts b/src/utils/database.ts index 83c910c30..b87de11a8 100644 --- a/src/utils/database.ts +++ b/src/utils/database.ts @@ -23,11 +23,21 @@ export function hasValidDBConfiguration(configuration: OceanNodeDBConfig): boole if (!configuration || !configuration.dbType) { return false } - return ( - configuration.url && - URLUtils.isValidUrl(configuration.url) && - [DB_TYPES.ELASTIC_SEARCH, DB_TYPES.TYPESENSE].includes(configuration.dbType) + const hasValidUrl = configuration.url && URLUtils.isValidUrl(configuration.url) + const hasValidDbType = [DB_TYPES.ELASTIC_SEARCH, DB_TYPES.TYPESENSE].includes( + configuration.dbType ) + + if (!hasValidUrl || !hasValidDbType) { + return false + } + + // For Elasticsearch, username and password are required + if (configuration.dbType === DB_TYPES.ELASTIC_SEARCH) { + return !!(configuration.username && configuration.password) + } + + return true } // we can use this to check if DB connection is available