Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 12 additions & 6 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ jobs:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: "v22.15.0"
node-version: 'v22.15.0'
- name: Cache node_modules
uses: actions/cache@v3
env:
Expand All @@ -42,7 +42,7 @@ jobs:
fail-fast: false
matrix:
os: [ubuntu-latest]
node: ["18.20.4", "v20.19.0", "v22.15.0"]
node: ['18.20.4', 'v20.19.0', 'v22.15.0']

steps:
- uses: actions/checkout@v4
Expand All @@ -66,7 +66,7 @@ jobs:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: "v22.15.0"
node-version: 'v22.15.0'
- name: Cache node_modules
uses: actions/cache@v3
env:
Expand All @@ -87,6 +87,8 @@ jobs:
RPCS: '{ "1": {"rpc": "https://rpc.eth.gateway.fm", "chainId": 1, "network": "mainnet", "chunkSize": 100}, "137": {"rpc": "https://polygon.meowrpc.com", "chainId": 137, "network": "polygon", "chunkSize": 100 }, "80001": {"rpc": "https://rpc-mumbai.maticvigil.com","chainId": 80001, "network": "polygon-mumbai", "chunkSize": 100 } }'
DB_URL: 'http://localhost:8108/?apiKey=xyz'
DB_TYPE: 'typesense'
DB_USERNAME: 'elastic'
DB_PASSWORD: 'changeme'
FEE_TOKENS: '{ "1": "0x967da4048cD07aB37855c090aAF366e4ce1b9F48", "137": "0x282d8efCe846A88B159800bd4130ad77443Fa1A1", "80001": "0xd8992Ed72C445c35Cb4A2be468568Ed1079357c8", "56": "0xDCe07662CA8EbC241316a15B611c89711414Dd1a" }'
FEE_AMOUNT: '{ "amount": 1, "unit": "MB" }'
- uses: actions/upload-artifact@v4
Expand All @@ -100,7 +102,7 @@ jobs:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: "v22.15.0"
node-version: 'v22.15.0'
- name: Cache node_modules
uses: actions/cache@v3
env:
Expand Down Expand Up @@ -156,6 +158,8 @@ jobs:
RPCS: '{ "8996": {"rpc": "http://127.0.0.1:8545", "chainId": 8996, "network": "development", "chunkSize": 100}}'
DB_URL: 'http://localhost:9200'
DB_TYPE: 'elasticsearch'
DB_USERNAME: 'elastic'
DB_PASSWORD: 'changeme'
FEE_TOKENS: '{ "1": "0x967da4048cD07aB37855c090aAF366e4ce1b9F48", "137": "0x282d8efCe846A88B159800bd4130ad77443Fa1A1", "80001": "0xd8992Ed72C445c35Cb4A2be468568Ed1079357c8", "56": "0xDCe07662CA8EbC241316a15B611c89711414Dd1a" }'
FEE_AMOUNT: '{ "amount": 1, "unit": "MB" }'
ASSET_PURGATORY_URL: 'https://raw.githubusercontent.com/oceanprotocol/list-purgatory/main/list-assets.json'
Expand All @@ -178,7 +182,7 @@ jobs:
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: "v22.15.0"
node-version: 'v22.15.0'

- name: Cache node_modules
uses: actions/cache@v3
Expand Down Expand Up @@ -258,6 +262,8 @@ jobs:
P2P_ENABLE_AUTONAT: 'false'
ALLOWED_ADMINS: '["0xe2DD09d719Da89e5a3D0F2549c7E24566e947260"]'
DB_TYPE: 'elasticsearch'
DB_USERNAME: 'elastic'
DB_PASSWORD: 'changeme'
MAX_REQ_PER_MINUTE: 320
MAX_CONNECTIONS_PER_MINUTE: 320
DOCKER_COMPUTE_ENVIRONMENTS: '[{"socketPath":"/var/run/docker.sock","resources":[{"id":"disk","total":10}],"storageExpiry":604800,"maxJobDuration":3600,"minJobDuration": 60,"fees":{"8996":[{"prices":[{"id":"cpu","price":1}]}]},"free":{"maxJobDuration":60,"minJobDuration": 10,"maxJobs":3,"resources":[{"id":"cpu","max":1},{"id":"ram","max":1},{"id":"disk","max":1}]}}]'
Expand Down Expand Up @@ -309,7 +315,7 @@ jobs:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with:
node-version: "v22.15.0"
node-version: 'v22.15.0'
- name: Cache node_modules
uses: actions/cache@v3
env:
Expand Down
28 changes: 15 additions & 13 deletions src/components/core/handler/ddoHandler.ts
Original file line number Diff line number Diff line change
Expand Up @@ -798,11 +798,6 @@ export class FindDdoHandler extends CommandHandler {
}
}

export async function skipValidation(): Promise<boolean> {
const configuration = await getConfiguration()
return configuration.validateUnsignedDDO
}

export class ValidateDDOHandler extends CommandHandler {
validate(command: ValidateDDOCommand): ValidateParams {
let validation = validateCommandParameters(command, ['ddo'])
Expand All @@ -815,8 +810,15 @@ export class ValidateDDOHandler extends CommandHandler {

async handle(task: ValidateDDOCommand): Promise<P2PCommandResponse> {
const validationResponse = await this.verifyParamsAndRateLimits(task)
const shouldSkipValidation = await skipValidation()
if (!shouldSkipValidation) {
if (this.shouldDenyTaskHandling(validationResponse)) {
return validationResponse
}
let shouldSign = false
const configuration = await getConfiguration()
if (configuration.validateUnsignedDDO) {
shouldSign = true
}
if (task.authorization || task.signature || task.nonce || task.publisherAddress) {
const validationResponse = await this.validateTokenOrSignature(
task.authorization,
task.publisherAddress,
Expand All @@ -827,10 +829,7 @@ export class ValidateDDOHandler extends CommandHandler {
if (validationResponse.status.httpStatus !== 200) {
return validationResponse
}
}

if (this.shouldDenyTaskHandling(validationResponse)) {
return validationResponse
shouldSign = true
}

try {
Expand All @@ -849,9 +848,12 @@ export class ValidateDDOHandler extends CommandHandler {
status: { httpStatus: 400, error: `Validation error: ${validation[1]}` }
}
}
const signature = await getValidationSignature(JSON.stringify(task.ddo))
return {
stream: Readable.from(JSON.stringify(signature)),
stream: shouldSign
? Readable.from(
JSON.stringify(await getValidationSignature(JSON.stringify(task.ddo)))
)
: null,
status: { httpStatus: 200 }
}
} catch (error) {
Expand Down
2 changes: 2 additions & 0 deletions src/test/.env.test
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@ PRIVATE_KEY=0xc594c6e5def4bab63ac29eed19a134c130388f74f019bc74b8f4389df2837a58
RPCS='{ "8996": {"rpc": "http://127.0.0.1:8545", "chainId": 8996, "network": "development", "chunkSize": 100}}'
INDEXER_NETWORKS='[8996]'
DB_URL=http://localhost:9200
DB_USERNAME=elastic
DB_PASSWORD=changeme
IPFS_GATEWAY=https://ipfs.io/
ARWEAVE_GATEWAY=https://arweave.net/
NODE1_PRIVATE_KEY=0xcb345bd2b11264d523ddaf383094e2675c420a17511c3102a53817f13474a7ff
Expand Down
4 changes: 3 additions & 1 deletion src/test/integration/database.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,9 @@ const typesenseConfig: OceanNodeDBConfig = {

const elasticConfig: OceanNodeDBConfig = {
url: 'http://localhost:9200',
dbType: DB_TYPES.ELASTIC_SEARCH
dbType: DB_TYPES.ELASTIC_SEARCH,
username: 'elastic',
password: 'changeme'
}

const emptyDBConfig: OceanNodeDBConfig = {
Expand Down
4 changes: 3 additions & 1 deletion src/test/integration/elasticsearch.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,9 @@ import { SQLLiteNonceDatabase } from '../../components/database/SQLLiteNonceData

const dbConfig = {
url: 'http://localhost:9200',
dbType: DB_TYPES.ELASTIC_SEARCH
dbType: DB_TYPES.ELASTIC_SEARCH,
username: 'elastic',
password: 'changeme'
}
const elasticsearch: Database = await Database.init(dbConfig)

Expand Down
40 changes: 37 additions & 3 deletions src/test/unit/indexer/validation.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,9 @@ import { PROTOCOL_COMMANDS } from '../../../utils/constants.js'
import { RPCS } from '../../../@types/blockchain.js'
import { Database } from '../../../components/database/index.js'
import { OceanNodeConfig } from '../../../@types/OceanNode.js'
import sinon, { SinonSandbox } from 'sinon'
// import sinon, { SinonSandbox } from 'sinon'
import { ethers } from 'ethers'
import { Readable } from 'stream'

describe('Schema validation tests', () => {
const mockSupportedNetworks: RPCS = getMockSupportedNetworks()
Expand All @@ -26,7 +28,7 @@ describe('Schema validation tests', () => {
let mockDatabase: Database
let config: OceanNodeConfig
let oceanNode: OceanNode
let sandbox: SinonSandbox
// let sandbox: SinonSandbox

// For token validation, please check integration test cases
before(async () => {
Expand All @@ -48,7 +50,7 @@ describe('Schema validation tests', () => {
)
envOverrides = await setupEnvironment(TEST_ENV_CONFIG_FILE, envOverrides)
config = await getConfiguration(true)
sandbox = sinon.createSandbox()
/* sandbox = sinon.createSandbox()
sandbox.stub(Database, 'init').resolves({
nonce: {},
c2d: {},
Expand All @@ -60,6 +62,7 @@ describe('Schema validation tests', () => {
order: {},
ddoState: {}
} as any)
*/
mockDatabase = await Database.init(config.dbConfig)
oceanNode = await OceanNode.getInstance(
config,
Expand Down Expand Up @@ -166,4 +169,35 @@ describe('Schema validation tests', () => {

expect(result.status.httpStatus).to.equal(401)
})

it('should have node signature for valid user', async () => {
const handler = new ValidateDDOHandler(oceanNode)
const ddoInstance = DDOManager.getDDOClass(DDOExample)
const ddo: DDO = {
...(ddoInstance.getDDOData() as DDO)
}
const wallet = new ethers.Wallet(
'0xef4b441145c1d0f3b4bc6d61d29f5c6e502359481152f869247c7a4244d45209'
)
const nonce = Date.now().toString()
const message = String((await wallet.getAddress()) + nonce)
const consumerMessage = ethers.solidityPackedKeccak256(
['bytes'],
[ethers.hexlify(ethers.toUtf8Bytes(message))]
)
const messageHashBytes = ethers.toBeArray(consumerMessage)
const signature = await wallet.signMessage(messageHashBytes)
const task = {
ddo,
publisherAddress: await wallet.getAddress(),
nonce,
signature,
command: PROTOCOL_COMMANDS.VALIDATE_DDO
}

const result = await handler.handle(task)

expect(result.status.httpStatus).to.equal(200)
expect(result.stream).to.be.instanceOf(Readable)
})
})
18 changes: 14 additions & 4 deletions src/utils/database.ts
Original file line number Diff line number Diff line change
Expand Up @@ -23,11 +23,21 @@ export function hasValidDBConfiguration(configuration: OceanNodeDBConfig): boole
if (!configuration || !configuration.dbType) {
return false
}
return (
configuration.url &&
URLUtils.isValidUrl(configuration.url) &&
[DB_TYPES.ELASTIC_SEARCH, DB_TYPES.TYPESENSE].includes(configuration.dbType)
const hasValidUrl = configuration.url && URLUtils.isValidUrl(configuration.url)
const hasValidDbType = [DB_TYPES.ELASTIC_SEARCH, DB_TYPES.TYPESENSE].includes(
configuration.dbType
)

if (!hasValidUrl || !hasValidDbType) {
return false
}

// For Elasticsearch, username and password are required
if (configuration.dbType === DB_TYPES.ELASTIC_SEARCH) {
return !!(configuration.username && configuration.password)
}

return true
}

// we can use this to check if DB connection is available
Expand Down
Loading