diff --git a/README.md b/README.md index a944615..f08dd4f 100644 --- a/README.md +++ b/README.md @@ -90,6 +90,14 @@ export INDEXING_RETRY_INTERVAL='3000' export AVOID_LOOP_RUN='true/false' ``` +- Optional: set SSI_WALLET_API, SSI_WALLET_ID and SSI_WALLET_DID to support v5 DDOs (assets using credentialSubject and SSI policy flows). + +``` +export SSI_WALLET_API="https://your-ssi-wallet.example/api" +export SSI_WALLET_ID="did:example:your-wallet-did-or-id" +export SSI_WALLET_DID="did:example" +``` + ### Build the TypeScript code @@ -108,7 +116,7 @@ npm run cli h E.g. run publish command -Make sure to update chainId from the assets from `metadata` folder. +Make sure to update chainId and serviceEndpoint in the assets in the `metadata` folder. ``` npm run cli publish metadata/simpleDownloadDataset.json @@ -186,23 +194,29 @@ npm run cli [options] **Download:** - **Positional:** - `npm run cli download did:op:123 ./custom-folder` + `npm run cli download did:op:123 ./custom-folder serviceId` - **Named Options:** - `npm run cli download --did did:op:123 --folder ./custom-folder` + `npm run cli download --did did:op:123 --folder ./custom-folder --service serviceId` (Order of `--did` and `--folder` does not matter.) +- **Rules:** + serviceId is optional. If omitted, the CLI defaults to the first available download service. + --- **Start Compute:** - **Positional:** - `npm run cli startCompute -- did1,did2 algoDid env1 maxJobDuration paymentToken resources --accept true` + `npm run cli startCompute -- did1,did2 algoDid env1 maxJobDuration paymentToken resources svc1,svc2 algoServiceId` - **Named Options:** - `npm run cli startCompute --datasets did1,did2 --algo algoDid --env env1 --maxJobDuration maxJobDuration --token paymentToken --resources resources --accept true` + `npm run cli startCompute --datasets did1,did2 --algo algoDid --env env1 --maxJobDuration maxJobDuration --token paymentToken --resources resources --accept true --services svc1,svc2 --algo-service algoServiceId` (Options can be provided in any order.) +- **Rules:** + serviceIds and algoServiceId are optional. If omitted, the CLI defaults to the first available service. + - `maxJobDuration` is a required parameter and represents the maximum execution time of the job, measured in seconds; the payment is based on this maxJobDuration value, which the user must provide. - `paymentToken` is required and represents the address of the token that is supported by the environment for processing the compute job payment. It can be retrieved from `getComputeEnvironments` command output. @@ -218,7 +232,11 @@ e.g.: `'[{"id":"cpu","amount":3},{"id":"ram","amount":16772672536},{"id":"disk", `npm run cli startFreeCompute did1,did2 algoDid env1` - **Named Options:** - `npm run cli startFreeCompute --datasets did1,did2 --algo algoDid --env env1` + `npm run cli startFreeCompute --datasets did1,did2 --algo algoDid --env env1 --services svc1,svc2 --algo-service algoServiceId` + (Options can be provided in any order.) + +- **Rules:** + serviceIds and algoServiceId are optional. If omitted, the CLI defaults to the first available service. (Options can be provided in any order.)
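The positional `serviceIds` rule documented above pairs the i-th serviceId with the i-th dataset DID, falling back to the first available service where no entry is given. A minimal TypeScript sketch of that pairing (illustration only — the helper name is hypothetical; the actual validation lives in `src/cli.ts` further down in this diff):

```typescript
// Hypothetical helper, not part of this diff: pairs comma-separated
// serviceIds with datasetDids positionally, as startCompute/startFreeCompute do.
function pairDatasetsWithServices(
  datasetDids: string[],
  serviceIds?: string[]
): { did: string; serviceId?: string }[] {
  // If serviceIds is provided, it must match datasetDids length (positional 1-1).
  if (serviceIds && serviceIds.length !== datasetDids.length) {
    throw new Error(
      `Length mismatch: datasetDids=${datasetDids.length} vs serviceIds=${serviceIds.length}`
    );
  }
  // An undefined serviceId means: default to the asset's first service.
  return datasetDids.map((did, i) => ({ did, serviceId: serviceIds?.[i] }));
}

// e.g. pairDatasetsWithServices(['did:op:1', 'did:op:2'], ['svc1', 'svc2'])
```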
--- @@ -416,6 +434,8 @@ e.g.: `'[{"id":"cpu","amount":3},{"id":"ram","amount":16772672536},{"id":"disk", - **download:** `-d, --did <did>` `-f, --folder [destinationFolder]` (Default: `.`) + `-s, --service <serviceId>` (Optional, target a specific service) + - **startCompute:** `-d, --datasets <datasetDids>` `-a, --algo <algoDid>` `-e, --env <envId>` `--maxJobDuration <maxJobDuration>` `-t, --token <paymentToken>` `--resources <resources>` `--amountToDeposit <amount>` (If `''`, it will fall back to the initialize compute payment amount.) + `-s, --services [serviceIds]` (Optional, comma-separated; must match datasetDids length, positional 1–1) + `-x, --algo-service [algoServiceId]` (Optional, override algorithm service) - **startFreeCompute:** `-d, --datasets <datasetDids>` `-a, --algo <algoDid>` `-e, --env <envId>` + `-s, --services [serviceIds]` (Optional, comma-separated; must match datasetDids length, positional 1–1) + `-x, --algo-service [algoServiceId]` (Optional, override algorithm service) - **getComputeEnvironments:** diff --git a/metadata/jsAlgoV5.json b/metadata/jsAlgoV5.json new file mode 100644 index 0000000..b88d45a --- /dev/null +++ b/metadata/jsAlgoV5.json @@ -0,0 +1,199 @@ +{ + "@context": [ + "https://www.w3.org/ns/credentials/v2" + ], + "id": "did:ope", + "version": "5.0.0", + "credentialSubject": { + "chainId": 8996, + "metadata": { + "created": "2025-09-09T11:57:52Z", + "updated": "2025-09-09T11:57:52Z", + "type": "algorithm", + "name": "cli algo test", + "description": { + "@value": "testing algo", + "@direction": "", + "@language": "" + }, + "tags": [ + "test" + ], + "author": "", + "license": { + "name": "https://raw.githubusercontent.com/oceanprotocol/c2d-examples/main/branin_and_gpr/branin.arff", + "licenseDocuments": [ + { + "sha256": "71a828f4ddc3fee436d80fc5b02535fc5e13ae8f19eae87ec02734bb01126c5b", + "mirrors": [ + { + "method": "get", + "type": "url", + "url": "https://raw.githubusercontent.com/oceanprotocol/c2d-examples/main/branin_and_gpr/branin.arff" + } + ], + "name": "https://raw.githubusercontent.com/oceanprotocol/c2d-examples/main/branin_and_gpr/branin.arff", + "fileType": "text/plain; charset=utf-8" + } + ] + }, + "links": {}, + "additionalInformation": { + "termsAndConditions": true + }, + "algorithm": { + "language": "arff", + "version": "0.1", + "container": { + "entrypoint": "node $ALGO", + "image": "node", + "tag": "latest", + "checksum": "sha256:c3688c7b5cc3b159ddf4f744594de62461df7ef43124378f118cb470e4f72bcb" + } + }, + "copyrightHolder": "", + "providedBy": "" + }, + "services": [ + { + "compute": { + "publisherTrustedAlgorithms": [], + "publisherTrustedAlgorithmPublishers": [ + "*" + ], + "allowRawAlgorithm": false, + "allowNetworkAccess": true + }, + "credentials": { + "allow": [ + { + "values": [ + { + "request_credentials": [ + { + "format": "jwt_vc_json", + "policies": [], + "type": "UniversityDegree" + } + ], + "vc_policies": [ + "not-before", + "revoked-status-list", + "signature" + ] + } + ], + "type": "SSIpolicy" + }, + { + "values": [ + { + "address": "*" + } + ], + "type": "address" + } + ], + "match_deny": "any", + "deny": [] + }, + "name": "test service", + "files": { + "datatokenAddress": "0x0", + "nftAddress": "0x0", + "files": [ + { + "type": "url", + "url": "https://raw.githubusercontent.com/oceanprotocol/c2d-examples/main/branin_and_gpr/branin.arff", + "method": "GET" + } + ] + }, + "description": { + "@value": "testing new service", + "@direction": "ltr", + "@language": "en" + }, + "id": "23033c06ea9adc6ff5dff42938417c7750e75efbdfbfecf5d6d544705450df24", + "datatokenAddress": "0x0", +
"serviceEndpoint": "https://ocean-node-vm3.oceanenterprise.io", + "state": 0, + "type": "compute", + "timeout": 31556952 + } + ], + "nftAddress": "0x0", + "credentials": { + "allow": [ + { + "values": [ + { + "request_credentials": [ + { + "format": "jwt_vc_json", + "policies": [], + "type": "UniversityDegree" + } + ], + "vc_policies": [ + "not-before", + "revoked-status-list", + "signature" + ] + } + ], + "type": "SSIpolicy" + }, + { + "values": [ + { + "address": "*" + } + ], + "type": "address" + } + ], + "deny": [], + "match_deny": "any" + } + }, + "additionalDdos": [], + "type": [ + "VerifiableCredential" + ], + "issuer": "did:jwk:eyJrdHkiOiJPS1AiLCJjcnYiOiJFZDI1NTE5Iiwia2lkIjoiekxlRGh1UG15QndrampiV1pRZTBPeHdEMjMxYmx3aXFTMGQ0VTJETnZFbyIsIngiOiJta3NvaEladWlsblJWWmhTbmwxNGRpdTZZWDF3UUFFX0ozbWJoWDNDOFRjIn0", + "indexedMetadata": { + "stats": [ + { + "symbol": "OEAT", + "name": "Access Token", + "orders": 0, + "datatokenAddress": "", + "serviceId": "23033c06ea9adc6ff5dff42938417c7750e75efbdfbfecf5d6d544705450df24", + "prices": [ + { + "exchangeId": "", + "price": "2.0", + "contract": "", + "type": "fixedrate", + "token": "" + } + ] + } + ], + "nft": { + "state": 0, + "address": "", + "name": "Data NFT", + "symbol": "OEC-NFT", + "owner": "", + "created": "", + "tokenURI": "" + }, + "event": { + }, + "purgatory": { + "state": false + } + } +} \ No newline at end of file diff --git a/metadata/simpleDownloadDatasetV5.json b/metadata/simpleDownloadDatasetV5.json new file mode 100644 index 0000000..14bd89c --- /dev/null +++ b/metadata/simpleDownloadDatasetV5.json @@ -0,0 +1,167 @@ +{ + "@context": [ + "https://www.w3.org/ns/credentials/v2" + ], + "id": "did:ope", + "version": "5.0.0", + "type": ["VerifiableCredential"], + "additionalDdos": [], + "credentialSubject": { + "chainId": 8996, + "metadata": { + "created": "2025-02-19T10:23:59Z", + "updated": "2025-02-19T10:23:59Z", + "type": "dataset", + "name": "The Digital Project Management Office (DigitalPMO)", + "description": { + "@value": "fdsfsdfsdfsdfsdfsd", + "@direction": "", + "@language": "" + }, + "tags": [], + "author": "", + "license": { + "name": "https://www.google.de", + "licenseDocuments": [ + { + "name": "https://www.google.de", + "fileType": "text/html; charset=ISO-8859-1", + "sha256": "5a051e8e73057a521eadf3f80219495c4ed04d63397e5817a72ec2c335025b44", + "mirrors": [ + { + "type": "url", + "method": "get", + "url": "https://www.google.de" + } + ] + } + ] + }, + "links": {}, + "additionalInformation": { + "termsAndConditions": true + }, + "copyrightHolder": "", + "providedBy": "" + }, + "services": [ + { + "id": "ccb398c50d6abd5b456e8d7242bd856a1767a890b537c2f8c10ba8b8a10e6025", + "type": "access", + "files": { + "datatokenAddress": "0x0", + "nftAddress": "0x0", + "files": [ + { + "type": "url", + "url": "https://raw.githubusercontent.com/oceanprotocol/c2d-examples/main/branin_and_gpr/branin.arff", + "method": "GET" + } + ] + }, + "state": 0, + "datatokenAddress": "", + "serviceEndpoint": "https://ocean-node-vm3.oceanenterprise.io", + "timeout": 0, + "name": "", + "credentials": { + "allow": [ + { + "type": "address", + "values": [ + { + "address": "*" + } + ] + } + ], + "deny": [], + "match_deny": "any" + } + } + ], + "credentials": { + "allow": [ + { + "type": "SSIpolicy", + "values": [ + { + "request_credentials": [ + { + "format": "jwt_vc_json", + "policies": [], + "type": "UniversityDegree" + } + ], + "vc_policies": [ + "signature", + "not-before", + "revoked-status-list" + ], + "vp_policies": [ + { + "policy": 
"holder-binding" + }, + { + "policy": "presentation-definition" + }, + { + "policy": "minimum-credentials", + "args": "1" + }, + { + "policy": "maximum-credentials", + "args": "2" + } + ] + } + ] + }, + { + "type": "address", + "values": [ + { + "address": "*" + } + ] + } + ], + "deny": [], + "match_deny": "any" + } + }, + "indexedMetadata": { + "stats": [ + { + "datatokenAddress": "", + "name": "Access Token", + "symbol": "OEAT", + "serviceId": "ccb398c50d6abd5b456e8d7242bd856a1767a890b537c2f8c10ba8b8a10e6025", + "orders": 0, + "prices": [ + { + "type": "dispenser", + "price": "0", + "contract": "", + "token": "" + } + ] + } + ], + "nft": { + "state": 0, + "address": "", + "name": "Data NFT", + "symbol": "OEC-NFT", + "owner": "", + "created": "", + "tokenURI": "" + }, + "event": { + }, + "purgatory": { + "state": false + } + }, + "issuer": "did:jwk:eyJrdHkiOiJPS1AiLCJjcnYiOiJFZDI1NTE5Iiwia2lkIjoibUJjZDJXbGQ0YVNCaHpJR0RKNTVPc1NHWUY4R2h6Vmt2MkVkTC1KYko5MCIsIngiOiJJT1R2UWRqQlRxMzZMbTFuTXdkaTYzN00zdXMycUR3blN5MC13djVkNFRBIn0" +} diff --git a/package-lock.json b/package-lock.json index 20d58e1..ced3fe4 100644 --- a/package-lock.json +++ b/package-lock.json @@ -13,6 +13,7 @@ "@oceanprotocol/contracts": "^2.4.1", "@oceanprotocol/ddo-js": "^0.1.4", "@oceanprotocol/lib": "^5.0.3", + "axios": "^1.11.0", "commander": "^13.1.0", "cross-fetch": "^3.1.5", "crypto-js": "^4.1.1", @@ -4954,6 +4955,33 @@ "license": "MIT", "peer": true }, + "node_modules/axios": { + "version": "1.11.0", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.11.0.tgz", + "integrity": "sha512-1Lx3WLFQWm3ooKDYZD1eXmoGO9fxYQjrycfHFC8P0sCfQVXyROp0p9PFWBehewBOdCwHc+f/b8I0fMto5eSfwA==", + "license": "MIT", + "dependencies": { + "follow-redirects": "^1.15.6", + "form-data": "^4.0.4", + "proxy-from-env": "^1.1.0" + } + }, + "node_modules/axios/node_modules/form-data": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz", + "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==", + "license": "MIT", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/babel-plugin-macros": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/babel-plugin-macros/-/babel-plugin-macros-3.1.0.tgz", @@ -8538,6 +8566,26 @@ "dev": true, "license": "ISC" }, + "node_modules/follow-redirects": { + "version": "1.15.11", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.11.tgz", + "integrity": "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "license": "MIT", + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, "node_modules/for-each": { "version": "0.3.5", "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.5.tgz", @@ -13081,6 +13129,12 @@ "node": ">= 0.10" } }, + "node_modules/proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", + "license": "MIT" + }, "node_modules/psl": { "version": "1.15.0", "resolved": 
"https://registry.npmjs.org/psl/-/psl-1.15.0.tgz", diff --git a/package.json b/package.json index 40e8789..a6cddd9 100644 --- a/package.json +++ b/package.json @@ -48,6 +48,7 @@ "@oceanprotocol/contracts": "^2.4.1", "@oceanprotocol/ddo-js": "^0.1.4", "@oceanprotocol/lib": "^5.0.3", + "axios": "^1.11.0", "commander": "^13.1.0", "cross-fetch": "^3.1.5", "crypto-js": "^4.1.1", @@ -59,4 +60,4 @@ "ts-node": "^10.9.1", "tsx": "^4.19.3" } -} \ No newline at end of file +} diff --git a/src/cli.ts b/src/cli.ts index 54dcb44..f35f696 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -141,11 +141,14 @@ export async function createCLI() { .description('Downloads an asset into specified folder') .argument('', 'The asset DID') .argument('[folder]', 'Destination folder', '.') + .argument('[serviceId]', 'Service ID (optional)') .option('-d, --did ', 'The asset DID') .option('-f, --folder [folder]', 'Destination folder', '.') - .action(async (did, folder, options) => { + .option('-s, --service ', 'Service ID') + .action(async (did, folder, serviceId, options) => { const assetDid = options.did || did; const destFolder = options.folder || folder || '.'; + const svcId = options.service || serviceId; if (!assetDid) { console.error(chalk.red('DID is required')); // process.exit(1); @@ -153,7 +156,7 @@ export async function createCLI() { } const { signer, chainId } = await initializeSigner(); const commands = new Commands(signer, chainId); - await commands.download([null, assetDid, destFolder]); + await commands.download([null, assetDid, destFolder, svcId]); }); // allowAlgo command @@ -188,29 +191,55 @@ export async function createCLI() { .argument('', 'maxJobDuration for compute job') .argument('', 'Payment token for compute') .argument('', 'Resources of compute environment stringified') + .argument('[serviceIds]', 'Service IDs (comma-separated; positional mapping with datasetDIDs)') + .argument('[algoServiceId]', 'Algorithm Service ID (optional)') .option('-d, --datasets ', 'Dataset DIDs (comma-separated) OR (empty array for none)') .option('-a, --algo ', 'Algorithm DID') .option('-e, --env ', 'Compute environment ID') .option('--maxJobDuration ', 'Compute maxJobDuration') .option('-t, --token ', 'Compute payment token') + .option('-s, --services [serviceIds]', 'Service IDs (comma-separated; positional mapping with datasetDIDs)') + .option('-x, --algo-service [algoServiceId]', 'Algorithm Service ID (optional)') .option('--resources ', 'Compute resources') .option('--accept [boolean]', 'Auto-confirm payment for compute job (true/false)', toBoolean) - .action(async (datasetDids, algoDid, computeEnvId, maxJobDuration, paymentToken, resources, options) => { + .action(async (datasetDids, algoDid, computeEnvId, maxJobDuration, paymentToken, resources, serviceIds, algoServiceId, options) => { const dsDids = options.datasets || datasetDids; const aDid = options.algo || algoDid; const envId = options.env || computeEnvId; const jobDuration = options.maxJobDuration || maxJobDuration; const token = options.token || paymentToken; const res = options.resources || resources; + const svcIds = options.services ?? serviceIds ?? ''; + const algoSvcId = options.algoService ?? algoServiceId ?? ''; if (!dsDids || !aDid || !envId || !jobDuration || !token || !res) { console.error(chalk.red('Missing required arguments')); // process.exit(1); return } + + const dsArr = + dsDids === '[]' + ? [] + : dsDids.split(',').map(s => s.trim()).filter(Boolean); + + const svArr = svcIds + ? 
svcIds.split(',').map(s => s.trim()).filter(Boolean) + : undefined; + + // Optional check: serviceIds must match length if provided + if (svArr && svArr.length !== dsArr.length) { + console.error( + chalk.red( + `Length mismatch: datasetDids=${dsArr.length} vs serviceIds=${svArr.length}. ` + + 'If serviceIds is provided, it must match datasetDids length (positional 1–1).' + ) + ); + return; + } const { signer, chainId } = await initializeSigner(); const commands = new Commands(signer, chainId); - const initArgs = [null, dsDids, aDid, envId, jobDuration, token, res]; + const initArgs = [null, dsDids, aDid, envId, jobDuration, token, res, svcIds, algoSvcId]; const initResp = await commands.initializeCompute(initArgs); if (!initResp) { @@ -239,7 +268,7 @@ export async function createCLI() { console.log(chalk.cyan('Auto-confirm enabled with --yes flag.')); } - const computeArgs = [null, dsDids, aDid, envId, JSON.stringify(initResp), jobDuration, token, res]; + const computeArgs = [null, dsDids, aDid, envId, JSON.stringify(initResp), jobDuration, token, res, svcIds, algoSvcId]; await commands.computeStart(computeArgs); console.log(chalk.green('Compute job started successfully.')); @@ -252,21 +281,48 @@ export async function createCLI() { .argument('', 'Dataset DIDs (comma-separated) OR (empty array for none)') .argument('', 'Algorithm DID') .argument('', 'Compute environment ID') + .argument('[serviceIds]', 'Service IDs (comma-separated; positional mapping with datasetDIDs)') + .argument('[algoServiceId]', 'Algorithm Service ID (optional)') .option('-d, --datasets ', 'Dataset DIDs (comma-separated) OR (empty array for none)') .option('-a, --algo ', 'Algorithm DID') .option('-e, --env ', 'Compute environment ID') - .action(async (datasetDids, algoDid, computeEnvId, options) => { + .option('-s, --services [serviceIds]', 'Service IDs (comma-separated; positional mapping with datasetDIDs)') + .option('-x, --algo-service [algoServiceId]', 'Algorithm Service ID (optional)') + .action(async (datasetDids, algoDid, computeEnvId, serviceIds, algoServiceId, options) => { const dsDids = options.datasets || datasetDids; const aDid = options.algo || algoDid; const envId = options.env || computeEnvId; + const svcIds = options.services ?? serviceIds ?? ''; + const algoSvcId = options.algoService ?? algoServiceId ?? ''; + if (!dsDids || !aDid || !envId) { console.error(chalk.red('Missing required arguments')); // process.exit(1); return } + + const dsArr = + dsDids === '[]' + ? [] + : dsDids.split(',').map(s => s.trim()).filter(Boolean); + + const svArr = svcIds + ? svcIds.split(',').map(s => s.trim()).filter(Boolean) + : undefined; + + // Optional check: serviceIds must match length if provided + if (svArr && svArr.length !== dsArr.length) { + console.error( + chalk.red( + `Length mismatch: datasetDids=${dsArr.length} vs serviceIds=${svArr.length}. ` + + 'If serviceIds is provided, it must match datasetDids length (positional 1–1).' 
+ ) + ); + return; + } const { signer, chainId } = await initializeSigner(); const commands = new Commands(signer, chainId); - await commands.freeComputeStart([null, dsDids, aDid, envId]); + await commands.freeComputeStart([null, dsDids, aDid, envId, svcIds, algoSvcId]); }); // getComputeEnvironments command @@ -325,7 +381,7 @@ export async function createCLI() { .description('Displays the compute job status') .argument('', 'Dataset DID') .argument('', 'Job ID') - .argument('', 'Agreement ID') + .argument('[agreementId]', 'Agreement ID') .option('-d, --dataset ', 'Dataset DID') .option('-j, --job ', 'Job ID') .option('-a, --agreement [agreementId]', 'Agreement ID') diff --git a/src/commands.ts b/src/commands.ts index 112e8e2..c832b9d 100644 --- a/src/commands.ts +++ b/src/commands.ts @@ -31,11 +31,12 @@ import { AccesslistFactory, AccessListContract } from "@oceanprotocol/lib"; -import { Asset } from '@oceanprotocol/ddo-js'; +import { Asset, DDOManager } from '@oceanprotocol/ddo-js'; import { Signer, ethers, getAddress } from "ethers"; import { interactiveFlow } from "./interactiveFlow.js"; import { publishAsset } from "./publishAsset.js"; import chalk from 'chalk'; +import { getPolicyServerOBJ, getPolicyServerOBJs } from "./policyServerHelper.js"; export class Commands { public signer: Signer; @@ -81,12 +82,15 @@ export class Commands { } const encryptDDO = args[2] === "false" ? false : true; try { + const ddoInstance = DDOManager.getDDOClass(asset); + const { indexedMetadata } = ddoInstance.getAssetFields(); + const { services } = ddoInstance.getDDOFields(); // add some more checks const urlAssetId = await createAssetUtil( - asset.indexedMetadata.nft.name, - asset.indexedMetadata.nft.symbol, + indexedMetadata.nft.name, + indexedMetadata.nft.symbol, this.signer, - asset.services[0].files, + services[0].files, asset, this.oceanNodeUrl, this.config, @@ -113,11 +117,14 @@ export class Commands { const encryptDDO = args[2] === "false" ? false : true; // add some more checks try { + const ddoInstance = DDOManager.getDDOClass(algoAsset); + const { indexedMetadata } = ddoInstance.getAssetFields(); + const { services } = ddoInstance.getDDOFields(); const algoDid = await createAssetUtil( - algoAsset.indexedMetadata.nft.name, - algoAsset.indexedMetadata.nft.symbol, + indexedMetadata.nft.name, + indexedMetadata.nft.symbol, this.signer, - algoAsset.services[0].files, + services[0].files, algoAsset, this.oceanNodeUrl, this.config, @@ -192,8 +199,9 @@ export class Commands { } public async download(args: string[]) { + const did = args[1]; const dataDdo = await this.aquarius.waitForIndexer( - args[1], + did, null, null, this.indexingParams.retryInterval, @@ -201,17 +209,27 @@ export class Commands { ); if (!dataDdo) { console.error( - "Error fetching DDO " + args[1] + ". Does this asset exists?" + "Error fetching DDO " + did + ". Does this asset exists?" ); return; } + const ddoInstance = DDOManager.getDDOClass(dataDdo); + const { services, version } = ddoInstance.getDDOFields(); + const serviceId = args[3] ? 
args[3] : services[0].id; + let policyServer = null + try { + if (version >= '5.0.0') { + policyServer = await getPolicyServerOBJ(dataDdo, serviceId, this.signer, this.oceanNodeUrl); + } + } catch (error) { + throw new Error('Error getting Policy Server Object: ' + error.message) + } const datatoken = new Datatoken( this.signer, this.config.chainId, this.config ); - const tx = await orderAsset( dataDdo, this.signer, @@ -222,7 +240,7 @@ export class Commands { if (!tx) { console.error( - "Error ordering access for " + args[1] + ". Do you have enough tokens?" + "Error ordering access for " + did + ". Do you have enough tokens?" ); return; } @@ -231,11 +249,12 @@ export class Commands { const urlDownloadUrl = await ProviderInstance.getDownloadUrl( dataDdo.id, - dataDdo.services[0].id, + serviceId, 0, orderTx.hash, this.oceanNodeUrl, - this.signer + this.signer, + policyServer ); try { const path = args[2] ? args[2] : "."; @@ -250,18 +269,19 @@ export class Commands { const inputDatasetsString = args[1]; let inputDatasets = []; - if ( - inputDatasetsString.includes("[") && - inputDatasetsString.includes("]") - ) { - const processedInput = inputDatasetsString - .replaceAll("]", "") - .replaceAll("[", ""); - if (processedInput.indexOf(",") > -1) { - inputDatasets = processedInput.split(","); - } + if (!inputDatasetsString || inputDatasetsString.trim() === '[]') { + inputDatasets = []; } else { - inputDatasets.push(inputDatasetsString); + const cleaned = inputDatasetsString.replaceAll('[', '').replaceAll(']', ''); + inputDatasets = cleaned.split(',').map(s => s.trim()).filter(Boolean); + } + + const inputServicesString = args[7]; + let inputServices: string[] = []; + if (typeof inputServicesString === 'string' && inputServicesString.trim().length > 0) { + inputServices = inputServicesString.split(',').map(s => s.trim()).filter(Boolean); + } else if (Array.isArray(inputServicesString)) { + inputServices = inputServicesString.map(String).map(s => s.trim()).filter(Boolean); } const ddos = []; @@ -291,10 +311,11 @@ export class Commands { return; } let providerURI = this.oceanNodeUrl; + const ddoInstance = DDOManager.getDDOClass(ddos[0]); + const { services } = ddoInstance.getDDOFields(); if (ddos.length > 0) { - providerURI = ddos[0].services[0].serviceEndpoint; + providerURI = services[0].serviceEndpoint; } - const algoDdo = await this.aquarius.waitForIndexer( args[2], null, @@ -324,7 +345,6 @@ export class Commands { // NO chainId needed anymore (is not part of ComputeEnvironment spec anymore) // const chainComputeEnvs = computeEnvs[computeEnvID]; // was algoDdo.chainId let computeEnv = null; // chainComputeEnvs[0]; - if (computeEnvID && computeEnvID.length > 1) { for (const index in computeEnvs) { if (computeEnvID == computeEnvs[index].id) { @@ -340,18 +360,66 @@ export class Commands { ); return; } - + const ddoAlgoInstance = DDOManager.getDDOClass(algoDdo); + const { services: servicesAlgo, metadata: metadataAlgo, version: versionAlgo } = ddoAlgoInstance.getDDOFields(); + const algoServiceIdInput = args[8] as string | undefined; + let chosenAlgoServiceId = servicesAlgo[0].id; + if (typeof algoServiceIdInput === 'string' && algoServiceIdInput.trim().length > 0) { + const expectedAlgoServiceId = algoServiceIdInput.trim(); + const matchAlgoSvc = servicesAlgo.find((s: any) => s.id === expectedAlgoServiceId); + if (!matchAlgoSvc) { + console.error( + `Algorithm Service ID "${expectedAlgoServiceId}" not found in algo DDO ${algoDdo.id}. 
` + + 'Provide a valid service.id from the algorithm asset or omit the argument to use the default (services[0]).' + ); + return; + } + chosenAlgoServiceId = expectedAlgoServiceId; + } const algo: ComputeAlgorithm = { documentId: algoDdo.id, - serviceId: algoDdo.services[0].id, - meta: algoDdo.metadata.algorithm, + serviceId: chosenAlgoServiceId, + meta: metadataAlgo.algorithm, }; + const assetAlgo: { + documentId: string; + serviceId: string; + asset: Asset; + version?: string; + } = { + documentId: algoDdo.id, + serviceId: chosenAlgoServiceId, + asset: algoDdo, + version: versionAlgo + }; const assets = []; for (const dataDdo in ddos) { + const ddoInstanceDdo = DDOManager.getDDOClass(ddos[dataDdo]); + const { services: servicesDdo, version: versionDdo } = ddoInstanceDdo.getDDOFields(); + let chosenServiceId = servicesDdo[0].id; + if (inputServices.length > 0) { + const expectedServiceId = inputServices[Number(dataDdo)]; + const match = servicesDdo.find((s: any) => s.id === expectedServiceId); + if (!match) { + console.error( + `Service ID "${expectedServiceId}" not found in dataset ${inputDatasets[Number(dataDdo)]}. ` + + 'Ensure serviceIds[i] exists in the corresponding dataset services.' + ); + return; + } + chosenServiceId = expectedServiceId; + if (Number(dataDdo) === 0 && match.serviceEndpoint) { + providerURI = match.serviceEndpoint; + } + } else { + if (Number(dataDdo) === 0 && servicesDdo[0]?.serviceEndpoint) { + providerURI = servicesDdo[0].serviceEndpoint; + } + } const canStartCompute = isOrderable( ddos[dataDdo], - ddos[dataDdo].services[0].id, + chosenServiceId, algo, algoDdo ); @@ -363,7 +431,9 @@ export class Commands { } assets.push({ documentId: ddos[dataDdo].id, - serviceId: ddos[dataDdo].services[0].id, + serviceId: chosenServiceId, + asset: ddos[dataDdo], + version: versionDdo }); } const maxJobDuration = Number(args[4]) @@ -445,6 +515,7 @@ export class Commands { ); return; } + const policiesServer = await getPolicyServerOBJs(assets, assetAlgo, this.signer, this.oceanNodeUrl); const parsedResources = JSON.parse(resources); const providerInitializeComputeJob = await ProviderInstance.initializeCompute( @@ -456,7 +527,8 @@ export class Commands { providerURI, this.signer, // V1 was this.signer.getAddress() parsedResources, - Number(chainId) + Number(chainId), + policiesServer ); if ( !providerInitializeComputeJob || @@ -480,20 +552,22 @@ export class Commands { const inputDatasetsString = args[1]; let inputDatasets = []; - if ( - inputDatasetsString.includes("[") && - inputDatasetsString.includes("]") - ) { - const processedInput = inputDatasetsString - .replaceAll("]", "") - .replaceAll("[", ""); - if (processedInput.indexOf(",") > -1) { - inputDatasets = processedInput.split(","); - } + if (!inputDatasetsString || inputDatasetsString.trim() === '[]') { + inputDatasets = []; } else { - inputDatasets.push(inputDatasetsString); + const cleaned = inputDatasetsString.replaceAll('[', '').replaceAll(']', ''); + inputDatasets = cleaned.split(',').map(s => s.trim()).filter(Boolean); + } + + const inputServicesString = args[8]; + let inputServices: string[] = []; + if (typeof inputServicesString === 'string' && inputServicesString.trim().length > 0) { + inputServices = inputServicesString.split(',').map(s => s.trim()).filter(Boolean); + } else if (Array.isArray(inputServicesString)) { + inputServices = inputServicesString.map(String).map(s => s.trim()).filter(Boolean); } + const ddos = []; for (const dataset in inputDatasets) { @@ -521,8 +595,10 @@ export class Commands { 
return; } let providerURI = this.oceanNodeUrl; + const ddoInstance = DDOManager.getDDOClass(ddos[0]); + const { services } = ddoInstance.getDDOFields(); if (ddos.length > 0) { - providerURI = ddos[0].services[0].serviceEndpoint; + providerURI = services[0].serviceEndpoint; } const algoDdo = await this.aquarius.waitForIndexer( args[2], @@ -568,18 +644,81 @@ export class Commands { ); return; } - + const ddoInstanceAlgo = DDOManager.getDDOClass(algoDdo); + const { services: servicesAlgo, metadata: metadataAlgo, version: versionAlgo } = ddoInstanceAlgo.getDDOFields(); + const algoServiceIdInput = args[9] as string | undefined; + let chosenAlgoServiceId = servicesAlgo[0].id; + if (typeof algoServiceIdInput === 'string' && algoServiceIdInput.trim().length > 0) { + const expectedAlgoServiceId = algoServiceIdInput.trim(); + const matchAlgoSvc = servicesAlgo.find((s: any) => s.id === expectedAlgoServiceId); + if (!matchAlgoSvc) { + console.error( + `Algorithm Service ID "${expectedAlgoServiceId}" not found in algo DDO ${algoDdo.id}. ` + + 'Provide a valid service.id from the algorithm asset or omit the argument to use the default (services[0]).' + ); + return; + } + chosenAlgoServiceId = expectedAlgoServiceId; + } + const algoServiceIndex = servicesAlgo.findIndex((s: any) => s.id === chosenAlgoServiceId); + if (algoServiceIndex < 0) { + console.error(`Could not resolve serviceIndex for algorithm serviceId ${chosenAlgoServiceId}`); + return; + } const algo: ComputeAlgorithm = { documentId: algoDdo.id, - serviceId: algoDdo.services[0].id, - meta: algoDdo.metadata.algorithm, + serviceId: chosenAlgoServiceId, + meta: metadataAlgo.algorithm, + }; + + const assetAlgo: { + documentId: string; + serviceId: string; + asset: Asset; + version?: string; + } = { + documentId: algoDdo.id, + serviceId: chosenAlgoServiceId, + asset: algoDdo, + version: versionAlgo }; const assets = []; + const datasetServiceIndex: number[] = []; for (const dataDdo in ddos) { + const ddoInstanceDdo = DDOManager.getDDOClass(ddos[dataDdo]); + const { services: servicesDdo, version: versionDdo } = ddoInstanceDdo.getDDOFields(); + let chosenServiceId = servicesDdo[0].id; + if (inputServices.length > 0) { + const expectedServiceId = inputServices[Number(dataDdo)]; + const match = servicesDdo.find((s: any) => s.id === expectedServiceId); + if (!match) { + console.error( + `Service ID "${expectedServiceId}" not found in dataset ${inputDatasets[Number(dataDdo)]}. ` + + 'Ensure serviceIds[i] exists in the corresponding dataset services.' 
+ ); + return; + } + chosenServiceId = expectedServiceId; + if (Number(dataDdo) === 0 && match.serviceEndpoint) { + providerURI = match.serviceEndpoint; + } + } else { + if (Number(dataDdo) === 0 && servicesDdo[0]?.serviceEndpoint) { + providerURI = servicesDdo[0].serviceEndpoint; + } + } + const chosenServiceIndex = servicesDdo.findIndex((s: any) => s.id === chosenServiceId); + if (chosenServiceIndex < 0) { + console.error( + `Could not resolve serviceIndex for dataset ${ddos[dataDdo].id} with serviceId ${chosenServiceId}` + ); + return; + } + datasetServiceIndex.push(chosenServiceIndex); const canStartCompute = isOrderable( ddos[dataDdo], - ddos[dataDdo].services[0].id, + chosenServiceId, algo, algoDdo ); @@ -591,7 +730,9 @@ export class Commands { } assets.push({ documentId: ddos[dataDdo].id, - serviceId: ddos[dataDdo].services[0].id, + serviceId: chosenServiceId, + asset: ddos[dataDdo], + version: versionDdo }); } const providerInitializeComputeJob = args[4]; // provider fees + payment @@ -607,7 +748,7 @@ export class Commands { algoDdo, this.signer, computeEnv.consumerAddress, - 0, + algoServiceIndex, datatoken, this.config, parsedProviderInitializeComputeJob?.algorithm?.providerFee, @@ -629,7 +770,7 @@ export class Commands { ddos[i], this.signer, computeEnv.consumerAddress, - 0, + datasetServiceIndex[i], datatoken, this.config, parsedProviderInitializeComputeJob?.datasets[i].providerFee, @@ -754,32 +895,10 @@ export class Commands { console.log("Starting compute job using provider: ", providerURI); - const additionalDatasets = assets.length > 1 ? assets.slice(1) : null; - if (assets.length > 0) { - console.log( - "Starting compute job on " + - assets[0].documentId + - " with additional datasets:" + - (!additionalDatasets ? "none" : additionalDatasets[0].documentId) - ); - } else { - console.log( - "Starting compute job on " + - algo.documentId + - " with additional datasets:" + - (!additionalDatasets ? 
"none" : additionalDatasets[0].documentId) - ); - } - if (additionalDatasets !== null) { - console.log( - "Adding additional datasets to dataset, according to C2D V2 specs" - ); - assets.push(additionalDatasets); - } - const output: ComputeOutput = { metadataUri: await getMetadataURI(), }; + const policiesServer = await getPolicyServerOBJs(assets, assetAlgo, this.signer, this.oceanNodeUrl); const computeJobs = await ProviderInstance.computeStart( providerURI, @@ -794,7 +913,8 @@ export class Commands { null, null, // additionalDatasets, only c2d v1 - output + output, + policiesServer ); console.log("computeJobs: ", computeJobs); @@ -812,20 +932,19 @@ export class Commands { const inputDatasetsString = args[1]; let inputDatasets = []; - if ( - inputDatasetsString.includes("[") && - inputDatasetsString.includes("]") - ) { - const processedInput = inputDatasetsString - .replaceAll("]", "") - .replaceAll("[", ""); - if (processedInput.indexOf(",") > -1) { - inputDatasets = processedInput.split(","); - } + if (!inputDatasetsString || inputDatasetsString.trim() === '[]') { + inputDatasets = []; } else { - inputDatasets.push(inputDatasetsString); + const cleaned = inputDatasetsString.replaceAll('[', '').replaceAll(']', ''); + inputDatasets = cleaned.split(',').map(s => s.trim()).filter(Boolean); + } + const inputServicesString = args[4]; + let inputServices: string[] = []; + if (typeof inputServicesString === 'string' && inputServicesString.trim().length > 0) { + inputServices = inputServicesString.split(',').map(s => s.trim()).filter(Boolean); + } else if (Array.isArray(inputServicesString)) { + inputServices = inputServicesString.map(String).map(s => s.trim()).filter(Boolean); } - const ddos = []; for (const dataset in inputDatasets) { @@ -854,8 +973,10 @@ export class Commands { return; } let providerURI = this.oceanNodeUrl; + const ddoInstance = DDOManager.getDDOClass(ddos[0]); + const { services } = ddoInstance.getDDOFields(); if (ddos.length > 0) { - providerURI = ddos[0].services[0].serviceEndpoint; + providerURI = services[0].serviceEndpoint; } const algoDdo = await this.aquarius.waitForIndexer( @@ -875,7 +996,6 @@ export class Commands { const computeEnvs = await ProviderInstance.getComputeEnvironments( this.oceanNodeUrl ); - if (!computeEnvs || computeEnvs.length < 1) { console.error( "Error fetching compute environments. No compute environments available." @@ -891,7 +1011,6 @@ export class Commands { // NO chainId needed anymore (is not part of ComputeEnvironment spec anymore) // const chainComputeEnvs = computeEnvs[computeEnvID]; // was algoDdo.chainId let computeEnv = null; // chainComputeEnvs[0]; - if (computeEnvID && computeEnvID.length > 1) { for (const env of computeEnvs) { if (computeEnvID == env.id && env.free) { @@ -908,18 +1027,68 @@ export class Commands { ); return; } + const ddoInstanceAlgo = DDOManager.getDDOClass(algoDdo); + const { services: servicesAlgo, metadata: metadataAlgo, version: versionAlgo } = ddoInstanceAlgo.getDDOFields(); + const algoServiceIdInput = args[5] as string | undefined; + let chosenAlgoServiceId = servicesAlgo[0].id; + if (typeof algoServiceIdInput === 'string' && algoServiceIdInput.trim().length > 0) { + const expectedAlgoServiceId = algoServiceIdInput.trim(); + const matchAlgoSvc = servicesAlgo.find((s: any) => s.id === expectedAlgoServiceId); + if (!matchAlgoSvc) { + console.error( + `Algorithm Service ID "${expectedAlgoServiceId}" not found in algo DDO ${algoDdo.id}. 
` + + 'Provide a valid service.id from the algorithm asset or omit the argument to use the default (services[0]).' + ); + return; + } + chosenAlgoServiceId = expectedAlgoServiceId; + } const algo: ComputeAlgorithm = { documentId: algoDdo.id, - serviceId: algoDdo.services[0].id, - meta: algoDdo.metadata.algorithm, + serviceId: chosenAlgoServiceId, + meta: metadataAlgo.algorithm, + }; + + const assetAlgo: { + documentId: string; + serviceId: string; + asset: Asset; + version?: string; + } = { + documentId: algoDdo.id, + serviceId: chosenAlgoServiceId, + asset: algoDdo, + version: versionAlgo }; const assets = []; for (const dataDdo in ddos) { + const ddoInstanceDdo = DDOManager.getDDOClass(ddos[dataDdo]); + const { services: servicesDdo, version: versionDdo } = ddoInstanceDdo.getDDOFields(); + let chosenServiceId = servicesDdo[0].id; + if (inputServices.length > 0) { + const expectedServiceId = inputServices[Number(dataDdo)]; + const match = servicesDdo.find((s: any) => s.id === expectedServiceId); + if (!match) { + console.error( + `Service ID "${expectedServiceId}" not found in dataset ${inputDatasets[Number(dataDdo)]}. ` + + 'Ensure serviceIds[i] exists in the corresponding dataset services.' + ); + return; + } + chosenServiceId = expectedServiceId; + if (Number(dataDdo) === 0 && match.serviceEndpoint) { + providerURI = match.serviceEndpoint; + } + } else { + if (Number(dataDdo) === 0 && servicesDdo[0]?.serviceEndpoint) { + providerURI = servicesDdo[0].serviceEndpoint; + } + } const canStartCompute = isOrderable( ddos[dataDdo], - ddos[dataDdo].services[0].id, + chosenServiceId, algo, algoDdo ); @@ -931,39 +1100,17 @@ export class Commands { } assets.push({ documentId: ddos[dataDdo].id, - serviceId: ddos[dataDdo].services[0].id, + serviceId: chosenServiceId, + asset: ddos[dataDdo], + version: versionDdo }); } - console.log("Starting compute job using provider: ", providerURI); - const additionalDatasets = assets.length > 1 ? assets.slice(1) : null; - if (assets.length > 0) { - console.log( - "Starting compute job on " + - assets[0].documentId + - " with additional datasets:" + - (!additionalDatasets ? "none" : additionalDatasets[0].documentId) - ); - } else { - console.log( - "Starting compute job on " + - algo.documentId + - " with additional datasets:" + - (!additionalDatasets ? "none" : additionalDatasets[0].documentId) - ); - } - - if (additionalDatasets !== null) { - console.log( - "Adding additional datasets to dataset, according to C2D V2 specs" - ); - assets.push(additionalDatasets); - } - const output: ComputeOutput = { metadataUri: await getMetadataURI(), }; + const policiesServer = await getPolicyServerOBJs(assets, assetAlgo, this.signer, this.oceanNodeUrl); const computeJobs = await ProviderInstance.freeComputeStart( providerURI, this.signer, @@ -973,10 +1120,10 @@ export class Commands { null, null, null, - output + output, + policiesServer ); - console.log("compute jobs: ", computeJobs); if (computeJobs && computeJobs[0]) { const { jobId } = computeJobs[0]; @@ -1059,21 +1206,24 @@ export class Commands { this.indexingParams.retryInterval, this.indexingParams.maxRetries ); + if (!asset) { console.error( "Error fetching DDO " + args[1] + ". Does this asset exists?" 
); return; } - - if (asset.indexedMetadata.nft.owner !== (await this.signer.getAddress())) { + const ddoInstance = DDOManager.getDDOClass(asset); + const { indexedMetadata } = ddoInstance.getAssetFields(); + const { services } = ddoInstance.getDDOFields(); + if (indexedMetadata.nft.owner !== (await this.signer.getAddress())) { console.error( "You are not the owner of this asset, and there for you cannot update it." ); return; } - if (asset.services[0].type !== "compute") { + if (services[0].type !== "compute") { console.error( "Error getting computeService for " + args[1] + @@ -1094,13 +1244,15 @@ export class Commands { ); return; } + const algoInstance = DDOManager.getDDOClass(algoAsset); + const { services: servicesAlgo, metadata: metadataAlgo } = algoInstance.getDDOFields(); const encryptDDO = args[3] === "false" ? false : true; let filesChecksum; try { filesChecksum = await ProviderInstance.checkDidFiles( algoAsset.id, - algoAsset.services[0].id, - algoAsset.services[0].serviceEndpoint, + servicesAlgo[0].id, + servicesAlgo[0].serviceEndpoint, true ); } catch (e) { @@ -1109,14 +1261,15 @@ export class Commands { } const containerChecksum = - algoAsset.metadata.algorithm.container.entrypoint + - algoAsset.metadata.algorithm.container.checksum; + metadataAlgo.algorithm.container.entrypoint + + metadataAlgo.algorithm.container.checksum; const trustedAlgorithm = { did: algoAsset.id, containerSectionChecksum: getHash(containerChecksum), filesChecksum: filesChecksum?.[0]?.checksum, + serviceId: servicesAlgo[0].id, }; - asset.services[0].compute.publisherTrustedAlgorithms.push(trustedAlgorithm); + services[0].compute.publisherTrustedAlgorithms.push(trustedAlgorithm); try { const txid = await updateAssetMetadata( this.signer, @@ -1146,13 +1299,16 @@ export class Commands { ); return; } - if (asset.indexedMetadata.nft.owner !== (await this.signer.getAddress())) { + const ddoInstance = DDOManager.getDDOClass(asset); + const { indexedMetadata } = ddoInstance.getAssetFields(); + const { services } = ddoInstance.getDDOFields(); + if (indexedMetadata.nft.owner !== (await this.signer.getAddress())) { console.error( "You are not the owner of this asset, and there for you cannot update it." ); return; } - if (asset.services[0].type !== "compute") { + if (services[0].type !== "compute") { console.error( "Error getting computeService for " + args[1] + @@ -1160,7 +1316,7 @@ export class Commands { ); return; } - if (asset.services[0].compute.publisherTrustedAlgorithms) { + if (services[0].compute.publisherTrustedAlgorithms) { console.error( " " + args[1] + ". Does this asset has an computeService?" ); @@ -1168,12 +1324,12 @@ export class Commands { } const encryptDDO = args[3] === "false" ? 
false : true; const indexToDelete = - asset.services[0].compute.publisherTrustedAlgorithms.findIndex( + services[0].compute.publisherTrustedAlgorithms.findIndex( (item) => item.did === args[2] ); if (indexToDelete !== -1) { - asset.services[0].compute.publisherTrustedAlgorithms.splice( + services[0].compute.publisherTrustedAlgorithms.splice( indexToDelete, 1 ); diff --git a/src/helpers.ts b/src/helpers.ts index e42eb81..f020e04 100644 --- a/src/helpers.ts +++ b/src/helpers.ts @@ -3,7 +3,7 @@ import fetch from "cross-fetch"; import { promises as fs, readFileSync } from "fs"; import * as path from "path"; import * as sapphire from '@oasisprotocol/sapphire-paratime'; -import { Asset, DDO } from '@oceanprotocol/ddo-js'; +import { Asset, DDO, DDOManager } from '@oceanprotocol/ddo-js'; import { AccesslistFactory, Aquarius, Nft, @@ -118,7 +118,6 @@ export async function createAssetUtil( accessListFactory?: string, allowAccessList?: string, denyAccessList?: string, - ) { const isAddress = typeof templateIDorAddress === 'string' const isTemplateIndex = typeof templateIDorAddress === 'number' @@ -159,10 +158,12 @@ export async function updateAssetMetadata( let flags; let metadata; const validateResult = await aquariusInstance.validate(updatedDdo, owner, oceanNodeUrl); + const ddoInstance = DDOManager.getDDOClass(updatedDdo); + const { chainId, nftAddress } = ddoInstance.getDDOFields(); if (encryptDDO) { const providerResponse = await ProviderInstance.encrypt( updatedDdo, - updatedDdo.chainId, + chainId, oceanNodeUrl ); metadata = await providerResponse; @@ -176,7 +177,7 @@ } const updateDdoTX = await nft.setMetadata( - updatedDdo.nftAddress, + nftAddress, await owner.getAddress(), 0, oceanNodeUrl, @@ -201,9 +202,9 @@ export async function handleComputeOrder( consumeMarkerFee?: ConsumeMarketFee ) { /* We do have 3 possible situations: - - have validOrder and no providerFees -> then order is valid, providerFees are valid, just use it in startCompute - - have validOrder and providerFees -> then order is valid but providerFees are not valid, we need to call reuseOrder and pay only providerFees - - no validOrder -> we need to call startOrder, to pay 1 DT & providerFees + - have validOrder and no providerFees -> then order is valid, providerFees are valid, just use it in startCompute + - have validOrder and providerFees -> then order is valid but providerFees are not valid, we need to call reuseOrder and pay only providerFees + - no validOrder -> we need to call startOrder, to pay 1 DT & providerFees */ const hasProviderFees = order.providerFee && order.providerFee.providerFeeAmount // no need to approve if it is 0 @@ -256,7 +257,9 @@ export async function isOrderable( algorithm: ComputeAlgorithm, algorithmDDO: Asset | DDO ): Promise<boolean> { - const datasetService = asset.services.find((s) => s.id === serviceId); + const ddoInstanceAsset = DDOManager.getDDOClass(asset); + const { services: servicesAsset } = ddoInstanceAsset.getDDOFields(); + const datasetService = servicesAsset.find((s) => s.id === serviceId); if (!datasetService) return false; if (datasetService.type === "compute") { @@ -417,4 +420,4 @@ export async function getConfigByChainId(chainId: number) { } return chainConfig; -} \ No newline at end of file +} diff --git a/src/policyServerHelper.ts b/src/policyServerHelper.ts new file mode 100644 index 0000000..9d28fed --- /dev/null +++ b/src/policyServerHelper.ts @@ -0,0 +1,418 @@ +import { Asset } from "@oceanprotocol/ddo-js" +import { PolicyServerActions, PolicyServerGetPdAction, PolicyServerInitiateAction, PolicyServerInitiateActionData, PolicyServerInitiateComputeActionData, PolicyServerPresentationDefinition, SsiVerifiableCredential, SsiWalletDid, SsiWalletSession } from "./policyServerInterfaces" +import axios from "axios" +import { Signer } from "ethers" + +export async function connectToSSIWallet( + owner: Signer, + api: string +): Promise<SsiWalletSession> { + if (!api) { + throw new Error('No SSI Wallet API configured') + } + + try { + let response = await axios.get(`${api}/wallet-api/auth/account/web3/nonce`) + + const nonce = response.data + const payload = { + challenge: nonce, + signed: await owner.signMessage(nonce), + publicKey: await owner.getAddress() + } + + response = await axios.post( + `${api}/wallet-api/auth/account/web3/signed`, + payload + ) + return response.data + } catch (error) { + throw error.response + } +} + +export async function sendPresentationRequest( + walletId: string, + did: string, + presentationRequest: string, + selectedCredentials: string[], + token: string, + api: string +): Promise<{ redirectUri: string }> { + if (!api) { + throw new Error('No SSI Wallet API configured') + } + try { + const response = await axios.post( + `${api}/wallet-api/wallet/${walletId}/exchange/usePresentationRequest`, + { + did, + presentationRequest, + selectedCredentials + }, + { + headers: { + Authorization: `Bearer ${token}` + }, + withCredentials: true + } + ) + + return response.data + } catch (error) { + throw error.response + } +} + +export async function resolvePresentationRequest( + walletId: string, + presentationRequest: string, + token: string, + api: string +): Promise<string> { + if (!api) { + throw new Error('No SSI Wallet API configured') + } + try { + const response = await axios.post( + `${api}/wallet-api/wallet/${walletId}/exchange/resolvePresentationRequest`, + presentationRequest, + { + headers: { + Authorization: `Bearer ${token}` + }, + withCredentials: true + } + ) + + return response.data + } catch (error) { + throw error.response + } +} + +export async function getWalletDids( + walletId: string, + token: string, + api: string +): Promise<SsiWalletDid[]> { + if (!api) { + throw new Error('No SSI Wallet API configured') + } + try { + const response = await axios.get( + `${api}/wallet-api/wallet/${walletId}/dids`, + { + headers: { + Authorization: `Bearer ${token}` + }, + withCredentials: true + } + ) + + return response.data + } catch (error) { + throw error.response + } +} + + +export async function requestCredentialPresentation( + asset: Asset, + consumerAddress: string, + serviceId: string, + providerUrl: string +): Promise<{ + success: boolean + openid4vc: string + policyServerData: PolicyServerInitiateActionData, +}> { + try { + const sessionId = crypto.randomUUID() + + const policyServer: PolicyServerInitiateActionData = { + sessionId, + successRedirectUri: ``, + errorRedirectUri: ``, + responseRedirectUri: ``, + presentationDefinitionUri: `` + } + + const action: PolicyServerInitiateAction = { + action: PolicyServerActions.INITIATE, + ddo: asset, + policyServer, + serviceId, + consumerAddress + } + const response = await axios.post( + `${providerUrl}/api/services/PolicyServerPassthrough`, + { + policyServerPassthrough: action + } + ) + + if (response.data.length === 0) { + // eslint-disable-next-line no-throw-literal + throw { success: false, message: 'No openid4vc url found' } + } + + return { + success: response.data?.success, + openid4vc: response.data?.message, + policyServerData: policyServer + } + } catch (error) { + if (error.request?.response) { + const err = JSON.parse(error.request.response) + throw err + } + if (error.response?.data) { + throw error.response?.data + } + throw error + } +} + +export async function matchCredentialForPresentationDefinition( + api: string, + walletId: string, + presentationDefinition: any, + token: string +): Promise<SsiVerifiableCredential[]> { + if (!api) { + throw new Error('No SSI Wallet API configured') + } + try { + const response = await axios.post( + `${api}/wallet-api/wallet/${walletId}/exchange/matchCredentialsForPresentationDefinition`, + presentationDefinition, + { + headers: { + Authorization: `Bearer ${token}` + }, + withCredentials: true + } + ) + + return response.data + } catch (error) { + throw error.response + } +} + +export async function getPd( + sessionId: string, + providerUrl: string +): Promise<PolicyServerPresentationDefinition> { + try { + const action: PolicyServerGetPdAction = { + action: PolicyServerActions.GET_PD, + sessionId + } + const response = await axios.post( + `${providerUrl}/api/services/PolicyServerPassthrough`, + { + policyServerPassthrough: action + } + ) + + if (typeof response.data === 'string' && response.data.length === 0) { + // eslint-disable-next-line no-throw-literal + throw { + success: false, + message: 'Could not read presentation definition' + } + } + + return response.data?.message + } catch (error) { + if (error.response?.data) { + throw error.response?.data + } + throw error + } +} + +export function extractURLSearchParams( + urlString: string +): Record<string, string> { + const url = new URL(urlString) + const { searchParams } = url + const params: Record<string, string> = {} + searchParams.forEach((value, key) => (params[key] = value)) + return params +} + +export async function getPolicyServerOBJ( + ddo: Asset, + serviceId: string, + signer: Signer, + providerUrl: string +): Promise<PolicyServerInitiateActionData> { + try { + const accountId = await signer.getAddress() + const presentationResult = await requestCredentialPresentation( + ddo, + accountId, + serviceId, + providerUrl + ) + + if ( + !presentationResult.openid4vc || + !presentationResult.success || + !presentationResult.policyServerData.sessionId + ) { + throw new Error('No valid openid4vc url found') + } + if ( + presentationResult.openid4vc && + typeof presentationResult.openid4vc === 'object' && + (presentationResult.openid4vc as any).redirectUri && + (presentationResult.openid4vc as any).redirectUri.includes( + 'success' + ) + ) { + const { id } = extractURLSearchParams( + (presentationResult.openid4vc as any).redirectUri + ) + return { + sessionId: id, + successRedirectUri: '', + errorRedirectUri: '', + responseRedirectUri: '', + presentationDefinitionUri: '' + } + } + const verifierSessionId = presentationResult.policyServerData.sessionId + + const presentationDefinition = await getPd(verifierSessionId, providerUrl) + const ssiApi = process.env.SSI_WALLET_API + if (!ssiApi) { + throw new Error('No SSI_WALLET_API configured') + } + const sessionToken = await connectToSSIWallet(signer, ssiApi) + const walletId = process.env.SSI_WALLET_ID + if (!walletId) { + throw new Error('No SSI_WALLET_ID configured') + } + const verifiableCredentials = await matchCredentialForPresentationDefinition( + ssiApi, + walletId, + presentationDefinition, + sessionToken.token + ) + const dids = await getWalletDids( + walletId, + sessionToken.token, + ssiApi + ) + if (!dids || dids.length === 0) { + throw new Error('No DIDs found in wallet') + } + const resolvedPresentationRequest = await resolvePresentationRequest( + walletId, + presentationResult.openid4vc, + sessionToken.token, + ssiApi + ) + const myDid = process.env.SSI_WALLET_DID + if (myDid && !dids.find((d) => d.did === myDid)) { + throw new Error(`DID ${myDid} not found in wallet`) + } + const did = myDid ? myDid : dids[0].did + const result = await sendPresentationRequest( + walletId, + did, + resolvedPresentationRequest, + verifiableCredentials.map((vc) => vc.id), + sessionToken.token, + ssiApi + ) + if ( + 'errorMessage' in result || + (result.redirectUri && result.redirectUri.includes('error')) + ) { + throw new Error('Credential presentation failed') + } + return { + sessionId: verifierSessionId, + successRedirectUri: '', + errorRedirectUri: '', + responseRedirectUri: '', + presentationDefinitionUri: '' + } + } catch (error: any) { + console.error('getPolicyServerOBJ error:', error) + if (error?.message) { + throw new Error(`getPolicyServerOBJ failed: ${error.message}`) + } + throw new Error('getPolicyServerOBJ failed') + } +} + +export async function getPolicyServerOBJs( + ddos: { + documentId: string + serviceId: string + asset: Asset + version?: string + }[], + algo: { + documentId: string + serviceId: string + asset: Asset + version?: string + }, + signer: Signer, + providerUrl: string +): Promise<PolicyServerInitiateComputeActionData[] | null> { + try { + const results: PolicyServerInitiateComputeActionData[] = [] + + // --- datasets + for (const ddo of ddos) { + if (!ddo.version || ddo.version < '5.0.0') { + return null + } + const result = await getPolicyServerOBJ( + ddo.asset, + ddo.serviceId, + signer, + providerUrl + ) + results.push({ + ...result, + documentId: ddo.documentId, + serviceId: ddo.serviceId + }) + } + + // --- algo + if (!algo?.version || algo.version < '5.0.0') { + return null + } + if (algo.serviceId) { + const algoResult = await getPolicyServerOBJ( + algo.asset, + algo.serviceId, + signer, + providerUrl + ) + results.push({ + ...algoResult, + documentId: algo.documentId, + serviceId: algo.serviceId + }) + } + + return results + } catch (error: any) { + console.error('getPolicyServerOBJs error:', error) + if (error?.message) { + throw new Error(`getPolicyServerOBJs failed: ${error.message}`) + } + throw new Error('getPolicyServerOBJs failed') + } +} diff --git a/src/policyServerInterfaces.ts b/src/policyServerInterfaces.ts new file mode 100644 index 0000000..5d6cdf9 --- /dev/null +++ b/src/policyServerInterfaces.ts @@ -0,0 +1,104 @@ +export interface SsiWalletSession { + session_id: string + status: string + token: string + expiration: Date +} + +export interface SsiVerifiableCredential { + id: string + parsedDocument: { + id: string + type: string[] + issuer: string + issuanceDate: Date + credentialSubject: Record<string, any> + } +} + +export interface SsiWalletDid { + alias: string + did: string + document: string + keyId: string +} + +export enum PolicyServerActions { + INITIATE = 'initiate', + GET_PD = 'getPD', + CHECK_SESSION_ID = 'checkSessionId', + PRESENTATION_REQUEST = 'presentationRequest', + DOWNLOAD = 'download', + PASSTHROUGH = 'passthrough' +} + +export interface PolicyServerResponse { + success: boolean + message: string + httpStatus: number +} + +export interface PolicyServerInitiateActionData { + sessionId: string + successRedirectUri: string + errorRedirectUri: string + responseRedirectUri: string + presentationDefinitionUri: string +} + +export interface PolicyServerInitiateComputeActionData + extends PolicyServerInitiateActionData { + documentId: string + serviceId: string +} +
diff --git a/src/policyServerInterfaces.ts b/src/policyServerInterfaces.ts
new file mode 100644
index 0000000..5d6cdf9
--- /dev/null
+++ b/src/policyServerInterfaces.ts
@@ -0,0 +1,104 @@
+export interface SsiWalletSession {
+  session_id: string
+  status: string
+  token: string
+  expiration: Date
+}
+
+export interface SsiVerifiableCredential {
+  id: string
+  parsedDocument: {
+    id: string
+    type: string[]
+    issuer: string
+    issuanceDate: Date
+    credentialSubject: Record<string, any>
+  }
+}
+
+export interface SsiWalletDid {
+  alias: string
+  did: string
+  document: string
+  keyId: string
+}
+
+export enum PolicyServerActions {
+  INITIATE = 'initiate',
+  GET_PD = 'getPD',
+  CHECK_SESSION_ID = 'checkSessionId',
+  PRESENTATION_REQUEST = 'presentationRequest',
+  DOWNLOAD = 'download',
+  PASSTHROUGH = 'passthrough'
+}
+
+export interface PolicyServerResponse {
+  success: boolean
+  message: string
+  httpStatus: number
+}
+
+export interface PolicyServerInitiateActionData {
+  sessionId: string
+  successRedirectUri: string
+  errorRedirectUri: string
+  responseRedirectUri: string
+  presentationDefinitionUri: string
+}
+
+export interface PolicyServerInitiateComputeActionData
+  extends PolicyServerInitiateActionData {
+  documentId: string
+  serviceId: string
+}
+
+export interface PolicyServerInitiateAction {
+  action: PolicyServerActions.INITIATE
+  ddo: any
+  policyServer: PolicyServerInitiateActionData
+  serviceId: string
+  consumerAddress: string
+}
+
+export interface PolicyServerGetPdAction {
+  action: PolicyServerActions.GET_PD
+  sessionId: string
+}
+
+export interface PolicyServerCheckSessionIdAction {
+  action: PolicyServerActions.CHECK_SESSION_ID
+  sessionId: string
+}
+
+export interface PolicyServerPresentationRequestAction {
+  action: PolicyServerActions.PRESENTATION_REQUEST
+  sessionId: string
+  vp_token: any
+  response: any
+  presentation_submission: any
+}
+
+export interface PolicyServerDownloadAction {
+  action: PolicyServerActions.DOWNLOAD
+  policyServer: {
+    sessionId: string
+  }
+}
+
+export interface PolicyServerPassthrough {
+  action: PolicyServerActions.PASSTHROUGH
+  url: string
+  httpMethod: 'GET'
+  body: any
+}
+
+export interface PolicyServerPresentationDefinition {
+  input_descriptors: any[]
+}
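These interfaces type the JSON bodies that the helpers above post to the provider's `PolicyServerPassthrough` route. As a small illustration, the `getPd` call boils down to the following (the provider URL is a placeholder and the import path assumes the file above):

```
import axios from 'axios'
import {
  PolicyServerActions,
  PolicyServerGetPdAction
} from './policyServerInterfaces.js'

async function fetchPresentationDefinition(sessionId: string) {
  const action: PolicyServerGetPdAction = {
    action: PolicyServerActions.GET_PD,
    sessionId // verifier session id from the initiate step
  }
  // Same wire format as getPd: the typed action travels under `policyServerPassthrough`
  const response = await axios.post(
    'http://127.0.0.1:8001/api/services/PolicyServerPassthrough',
    { policyServerPassthrough: action }
  )
  return response.data?.message
}
```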
diff --git a/test/consumeFlow.test.ts b/test/consumeFlow.test.ts
index 94545c5..c30696a 100644
--- a/test/consumeFlow.test.ts
+++ b/test/consumeFlow.test.ts
@@ -6,7 +6,7 @@ import https from "https";
 
 import { projectRoot, runCommand } from "./util.js";
 
-describe("Ocean CLI Publishing", function() {
+describe("Ocean CLI Publishing", function () {
   this.timeout(200000); // Set a longer timeout to allow the command to execute
 
   let downloadDatasetDid: string;
@@ -36,9 +36,9 @@ describe("Ocean CLI Publishing", function() {
     });
   });
 };
-
-  it("should publish a dataset using 'npm run cli publish'", async function() {
+
+  it("should publish a dataset using 'npm run cli publish'", async function () {
     const metadataFile = path.resolve(projectRoot, "metadata/simpleDownloadDataset.json");
 
     // Ensure the metadata file exists
@@ -54,22 +54,44 @@
     const output = await runCommand(`npm run cli publish ${metadataFile}`);
 
-    const jsonMatch = output.match(/did:op:[a-f0-9]{64}/);
+    const jsonMatch = output.match(/did:op:[a-f0-9]{64}/);
     if (!jsonMatch) {
-      console.error("Raw output:", output);
-      throw new Error("Could not find did in the output");
-    }
+      console.error("Raw output:", output);
+      throw new Error("Could not find did in the output");
+    }
 
-    try {
-      downloadDatasetDid = jsonMatch[0];
-    } catch (error) {
-      console.error("Extracted output:", jsonMatch[0]);
-      throw new Error("Failed to parse the extracted output:\n" + error);
-    }
+    try {
+      downloadDatasetDid = jsonMatch[0];
+    } catch (error) {
+      console.error("Extracted output:", jsonMatch[0]);
+      throw new Error("Failed to parse the extracted output:\n" + error);
+    }
   });
 
-  it("should publish a compute dataset using 'npm run cli publish'", async function() {
+  it("should publish a dataset v5 using 'npm run cli publish'", async function () {
+    const metadataFile = path.resolve(projectRoot, "metadata/simpleDownloadDatasetV5.json");
+
+    // Ensure the metadata file exists
+    if (!fs.existsSync(metadataFile)) {
+      throw new Error("Metadata file not found: " + metadataFile);
+    }
+
+    process.env.PRIVATE_KEY = "0x1d751ded5a32226054cd2e71261039b65afb9ee1c746d055dd699b1150a5befc";
+    // Using this account: 0x529043886F21D9bc1AE0feDb751e34265a246e47
+    process.env.RPC = "http://127.0.0.1:8545";
+    process.env.NODE_URL = "http://127.0.0.1:8001";
+    process.env.ADDRESS_FILE = path.join(process.env.HOME || "", ".ocean/ocean-contracts/artifacts/address.json");
+
+    const output = await runCommand(`npm run cli publish ${metadataFile}`);
+    const jsonMatch = output.match(/did:ope:[a-f0-9]{64}/);
+    if (!jsonMatch) {
+      console.error("Raw output:", output);
+      throw new Error("Could not find did in the output");
+    }
+  });
+
+  it("should publish a compute dataset using 'npm run cli publish'", async function () {
     const metadataFile = path.resolve(projectRoot, "metadata/simpleComputeDataset.json");
     // Ensure the metadata file exists
     if (!fs.existsSync(metadataFile)) {
@@ -78,44 +100,60 @@
     const output = await runCommand(`npm run cli publish ${metadataFile}`);
 
-    const jsonMatch = output.match(/did:op:[a-f0-9]{64}/);
+    const jsonMatch = output.match(/did:op:[a-f0-9]{64}/);
     if (!jsonMatch) {
-      console.error("Raw output:", output);
-      throw new Error("Could not find did in the output");
-    }
-
-    try {
-      computeDatasetDid = jsonMatch[0];
-    } catch (error) {
-      console.error("Extracted output:", jsonMatch[0]);
-      throw new Error("Failed to parse the extracted output:\n" + error);
-    }
+      console.error("Raw output:", output);
+      throw new Error("Could not find did in the output");
+    }
+
+    try {
+      computeDatasetDid = jsonMatch[0];
+    } catch (error) {
+      console.error("Extracted output:", jsonMatch[0]);
+      throw new Error("Failed to parse the extracted output:\n" + error);
+    }
   });
 
-  it("should publish a js Algorithm using 'npm run cli publishAlgo'", async function() {
+  it("should publish a js Algorithm using 'npm run cli publishAlgo'", async function () {
     const filePath = path.resolve(projectRoot, "metadata/jsAlgo.json");
 
     // Ensure the metadata file exists
     if (!fs.existsSync(filePath)) {
       throw new Error("Metadata file not found: " + filePath);
     }
-
+
     const output = await runCommand(`npm run cli publishAlgo ${filePath}`);
     const jsonMatch = output.match(/did:op:[a-f0-9]{64}/);
     if (!jsonMatch) {
-      console.error("Raw output:", output);
-      throw new Error("Could not find did in the output");
-    }
-
-    try {
-      jsAlgoDid = jsonMatch[0];
-    } catch (error) {
-      console.error("Extracted output:", jsonMatch[0]);
-      throw new Error("Failed to parse the extracted output:\n" + error);
-    }
+      console.error("Raw output:", output);
+      throw new Error("Could not find did in the output");
+    }
+
+    try {
+      jsAlgoDid = jsonMatch[0];
+    } catch (error) {
+      console.error("Extracted output:", jsonMatch[0]);
+      throw new Error("Failed to parse the extracted output:\n" + error);
+    }
   });
 
-  it("should publish a python Algorithm using 'npm run cli publishAlgo'", async function() {
+  it("should publish a js Algorithm V5 using 'npm run cli publishAlgo'", async function () {
+    const filePath = path.resolve(projectRoot, "metadata/jsAlgoV5.json");
+
+    // Ensure the metadata file exists
+    if (!fs.existsSync(filePath)) {
+      throw new Error("Metadata file not found: " + filePath);
+    }
+
+    const output = await runCommand(`npm run cli publishAlgo ${filePath}`);
+    const jsonMatch = output.match(/did:ope:[a-f0-9]{64}/);
+    if (!jsonMatch) {
+      console.error("Raw output:", output);
+      throw new Error("Could not find did in the output");
+    }
+  });
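The two V5 cases above can also be driven by hand; assuming a local barge stack and the bundled metadata files, the equivalent commands are:

```
npm run cli publish metadata/simpleDownloadDatasetV5.json
npm run cli publishAlgo metadata/jsAlgoV5.json
```

Note that v5 assets resolve to `did:ope:` identifiers, which is why these tests match `did:ope:` rather than `did:op:`.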
+
+  it("should publish a python Algorithm using 'npm run cli publishAlgo'", async function () {
     const filePath = path.resolve(projectRoot, "metadata/pythonAlgo.json");
 
     // Ensure the metadata file exists
@@ -126,91 +164,91 @@
     const output = await runCommand(`npm run cli publishAlgo ${filePath}`);
     const jsonMatch = output.match(/did:op:[a-f0-9]{64}/);
     if (!jsonMatch) {
-      console.error("Raw output:", output);
-      throw new Error("Could not find did in the output");
-    }
-
-    try {
-      pythonAlgoDid = jsonMatch[0];
-    } catch (error) {
-      console.error("Extracted output:", jsonMatch[0]);
-      throw new Error("Failed to parse the extracted output:\n" + error);
-    }
+      console.error("Raw output:", output);
+      throw new Error("Could not find did in the output");
+    }
+
+    try {
+      pythonAlgoDid = jsonMatch[0];
+    } catch (error) {
+      console.error("Extracted output:", jsonMatch[0]);
+      throw new Error("Failed to parse the extracted output:\n" + error);
+    }
   });
 
-  it("should get DDO using 'npm run cli getDDO' for download dataset", async function() {
+  it("should get DDO using 'npm run cli getDDO' for download dataset", async function () {
     const output = await runCommand(`npm run cli getDDO ${downloadDatasetDid}`);
-    const jsonMatch = output.match(/s*([\s\S]*)/);
-    if (!jsonMatch) {
-      console.error("Raw output:", output);
-      throw new Error("Could not find ddo in the output");
-    }
+    const jsonMatch = output.match(/s*([\s\S]*)/);
+    if (!jsonMatch) {
+      console.error("Raw output:", output);
+      throw new Error("Could not find ddo in the output");
+    }
     try {
       expect(output).to.contain(`Resolving Asset with DID: ${downloadDatasetDid}`)
-    } catch (error) {
-      console.error("Extracted output:", jsonMatch[0]);
-      throw new Error("Failed to parse the extracted output:\n" + error);
-    }
+    } catch (error) {
+      console.error("Extracted output:", jsonMatch[0]);
+      throw new Error("Failed to parse the extracted output:\n" + error);
+    }
   });
 
-  it("should get DDO using 'npm run cli getDDO' for compute dataset", async function() {
+  it("should get DDO using 'npm run cli getDDO' for compute dataset", async function () {
     const output = await runCommand(`npm run cli getDDO ${computeDatasetDid}`);
-    const jsonMatch = output.match(/s*([\s\S]*)/);
-    if (!jsonMatch) {
-      console.error("Raw output:", output);
-      throw new Error("Could not find ddo in the output");
-    }
+    const jsonMatch = output.match(/s*([\s\S]*)/);
+    if (!jsonMatch) {
+      console.error("Raw output:", output);
+      throw new Error("Could not find ddo in the output");
+    }
     try {
       expect(output).to.contain(`Resolving Asset with DID: ${computeDatasetDid}`)
-    } catch (error) {
-      console.error("Extracted output:", jsonMatch[0]);
-      throw new Error("Failed to parse the extracted output:\n" + error);
-    }
+    } catch (error) {
+      console.error("Extracted output:", jsonMatch[0]);
+      throw new Error("Failed to parse the extracted output:\n" + error);
+    }
   });
 
-  it("should get DDO using 'npm run cli getDDO' for JS algorithm", async function() {
+  it("should get DDO using 'npm run cli getDDO' for JS algorithm", async function () {
     const output = await runCommand(`npm run cli getDDO ${jsAlgoDid}`);
-    const jsonMatch = output.match(/s*([\s\S]*)/);
-    if (!jsonMatch) {
-      console.error("Raw output:", output);
-      throw new Error("Could not find ddo in the output");
-    }
+    const jsonMatch = output.match(/s*([\s\S]*)/);
+    if (!jsonMatch) {
+      console.error("Raw output:", output);
+      throw new Error("Could not find ddo in the output");
+    }
     try {
-      expect(output).to.contain(`Resolving Asset with DID: ${jsAlgoDid}`)
-    } catch (error) {
-      console.error("Extracted output:", jsonMatch[0]);
-      throw new Error("Failed to parse the extracted output:\n" + error);
-    }
+      expect(output).to.contain(`Resolving Asset with DID: ${jsAlgoDid}`)
+    } catch (error) {
+      console.error("Extracted output:", jsonMatch[0]);
+      throw new Error("Failed to parse the extracted output:\n" + error);
+    }
   });
 
for python algorithm", async function () { const output = await runCommand(`npm run cli getDDO ${pythonAlgoDid}`); - const jsonMatch = output.match(/s*([\s\S]*)/); - if (!jsonMatch) { - console.error("Raw output:", output); - throw new Error("Could not find ddo in the output"); - } + const jsonMatch = output.match(/s*([\s\S]*)/); + if (!jsonMatch) { + console.error("Raw output:", output); + throw new Error("Could not find ddo in the output"); + } try { - expect(output).to.contain(`Resolving Asset with DID: ${pythonAlgoDid}`) - } catch (error) { - console.error("Extracted output:", jsonMatch[0]); - throw new Error("Failed to parse the extracted output:\n" + error); - } + expect(output).to.contain(`Resolving Asset with DID: ${pythonAlgoDid}`) + } catch (error) { + console.error("Extracted output:", jsonMatch[0]); + throw new Error("Failed to parse the extracted output:\n" + error); + } }); - it("should download the download dataset", async function() { + it("should download the download dataset", async function () { this.timeout(10000); // Increase timeout if needed const output = await runCommand(`npm run cli download ${downloadDatasetDid} .`); - + expect(output).to.contain("File downloaded successfully"); // Path to the downloaded file @@ -227,6 +265,6 @@ describe("Ocean CLI Publishing", function() { // Clean up downloaded original file fs.unlinkSync(originalFilePath); - + }); }); diff --git a/test/interactivePublishFlow.ts b/test/interactivePublishFlow.ts index b8cd82e..9c238a2 100644 --- a/test/interactivePublishFlow.ts +++ b/test/interactivePublishFlow.ts @@ -7,13 +7,13 @@ import { fileURLToPath } from 'url' const __filename = fileURLToPath(import.meta.url) const __dirname = dirname(__filename) -describe("Ocean CLI Interactive Publishing", function() { +describe("Ocean CLI Interactive Publishing", function () { this.timeout(120000); // Set a longer timeout to allow for user input simulation const projectRoot = path.resolve(__dirname, ".."); let publishedDid: string; - it("should publish an asset using 'npm run cli start' interactive flow", function(done) { + it("should publish an asset using 'npm run cli start' interactive flow", function (done) { process.env.PRIVATE_KEY = "0x1d751ded5a32226054cd2e71261039b65afb9ee1c746d055dd699b1150a5befc"; process.env.RPC = "http://127.0.0.1:8545"; process.env.NODE_URL = "http://127.0.0.1:8001"; @@ -43,7 +43,9 @@ describe("Ocean CLI Interactive Publishing", function() { if (child.stdin) { const inputInterval = setInterval(() => { if (inputIndex < inputs.length) { - child.stdin.write(inputs[inputIndex]); + if (child.stdin) { + child.stdin.write(inputs[inputIndex]); + } inputIndex++; } else { clearInterval(inputInterval); @@ -61,7 +63,7 @@ describe("Ocean CLI Interactive Publishing", function() { expect(code).to.equal(0); expect(fullOutput).to.contain("Asset successfully published with DID:"); expect(fullOutput).to.contain("Metadata successfully updated for DID:"); - + const match = fullOutput.match(/did:op:[a-f0-9]{64}/); if (match) { publishedDid = match[0]; @@ -77,7 +79,7 @@ describe("Ocean CLI Interactive Publishing", function() { }); }); - it("should get DDO using 'npm run cli getDDO' for the published asset", function(done) { + it("should get DDO using 'npm run cli getDDO' for the published asset", function (done) { exec(`npm run cli getDDO ${publishedDid}`, { cwd: projectRoot }, (error, stdout) => { try { expect(stdout).to.contain(`${publishedDid}`); diff --git a/test/paidComputeFlow.test.ts b/test/paidComputeFlow.test.ts index 8847cb3..e3b0799 
diff --git a/test/paidComputeFlow.test.ts b/test/paidComputeFlow.test.ts
index 8847cb3..e3b0799 100644
--- a/test/paidComputeFlow.test.ts
+++ b/test/paidComputeFlow.test.ts
@@ -3,13 +3,13 @@ import path from "path";
 import fs from "fs";
 import { homedir } from 'os'
 import {
-  ProviderInstance
+  ProviderInstance
 } from "@oceanprotocol/lib";
 
 import { projectRoot, runCommand } from "./util.js";
 
-describe("Ocean CLI Paid Compute", function() {
+describe("Ocean CLI Paid Compute", function () {
   this.timeout(600000); // Set a longer timeout to allow the command to execute
 
   let computeDatasetDid: string;
@@ -23,15 +23,15 @@ describe("Ocean CLI Paid Compute", function() {
   const getAddresses = () => {
     const data = JSON.parse(
       fs.readFileSync(
-        process.env.ADDRESS_FILE ||
+        process.env.ADDRESS_FILE ||
         `${homedir}/.ocean/ocean-contracts/artifacts/address.json`,
-        'utf8'
+        'utf8'
       )
     )
     return data.development
   };
 
-  it("should publish a compute dataset using 'npm run cli publish'", async function() {
+  it("should publish a compute dataset using 'npm run cli publish'", async function () {
     const metadataFile = path.resolve(projectRoot, "metadata/simpleComputeDataset.json");
     // Ensure the metadata file exists
     if (!fs.existsSync(metadataFile)) {
@@ -46,21 +46,21 @@
     const output = await runCommand(`npm run cli publish ${metadataFile}`);
 
-    const jsonMatch = output.match(/did:op:[a-f0-9]{64}/);
+    const jsonMatch = output.match(/did:op:[a-f0-9]{64}/);
     if (!jsonMatch) {
-      console.error("Raw output:", output);
-      throw new Error("Could not find compute dataset did in the output");
-    }
-
-    try {
-      computeDatasetDid = jsonMatch[0];
-    } catch (error) {
-      console.error("Extracted output:", jsonMatch[0]);
-      throw new Error("Failed to parse the extracted output:\n" + error);
-    }
+      console.error("Raw output:", output);
+      throw new Error("Could not find compute dataset did in the output");
+    }
+
+    try {
+      computeDatasetDid = jsonMatch[0];
+    } catch (error) {
+      console.error("Extracted output:", jsonMatch[0]);
+      throw new Error("Failed to parse the extracted output:\n" + error);
+    }
   });
 
new Error("Could not find ddo in the output"); - } + const jsonMatch = output.match(/s*([\s\S]*)/); + if (!jsonMatch) { + console.error("Raw output:", output); + throw new Error("Could not find ddo in the output"); + } try { expect(output).to.contain(`Resolving Asset with DID: ${computeDatasetDid}`) - } catch (error) { - console.error("Extracted output:", jsonMatch[0]); - throw new Error("Failed to parse the extracted output:\n" + error); - } + } catch (error) { + console.error("Extracted output:", jsonMatch[0]); + throw new Error("Failed to parse the extracted output:\n" + error); + } }); - it("should get DDO using 'npm run cli getDDO' for JS algorithm", async function() { + it("should get DDO using 'npm run cli getDDO' for JS algorithm", async function () { const output = await runCommand(`npm run cli getDDO ${jsAlgoDid}`); - const jsonMatch = output.match(/s*([\s\S]*)/); - if (!jsonMatch) { - console.error("Raw output:", output); - throw new Error("Could not find ddo in the output"); - } + const jsonMatch = output.match(/s*([\s\S]*)/); + if (!jsonMatch) { + console.error("Raw output:", output); + throw new Error("Could not find ddo in the output"); + } try { - expect(output).to.contain(`Resolving Asset with DID: ${jsAlgoDid}`) - } catch (error) { - console.error("Extracted output:", jsonMatch[0]); - throw new Error("Failed to parse the extracted output:\n" + error); - } + expect(output).to.contain(`Resolving Asset with DID: ${jsAlgoDid}`) + } catch (error) { + console.error("Extracted output:", jsonMatch[0]); + throw new Error("Failed to parse the extracted output:\n" + error); + } }); - it("should get compute environments using 'npm run cli getComputeEnvironments'", async function() { + it("should get compute environments using 'npm run cli getComputeEnvironments'", async function () { const output = await runCommand(`npm run cli getComputeEnvironments`); - const jsonMatch = output.match(/Exiting compute environments:\s*([\s\S]*)/); - if (!jsonMatch) { - console.error("Raw output:", output); - throw new Error("Could not find compute environments in the output"); - } - - let environments; - try { - environments = eval(jsonMatch[1]); - } catch (error) { - console.error(`Extracted output: ${jsonMatch[0]} and final result: ${jsonMatch[1]}`); - throw new Error("Failed to parse the extracted output:\n" + error); - } - - expect(environments).to.be.an("array").that.is.not.empty; - - const firstEnv = environments[0]; - - expect(firstEnv).to.have.property("id").that.is.a("string"); - expect(firstEnv).to.have.property("consumerAddress").that.is.a("string"); - expect(firstEnv).to.have.property("resources").that.is.an("array"); - - computeEnvId = firstEnv.id; - - console.log(`Fetched Compute Env ID: ${computeEnvId}`); + const jsonMatch = output.match(/Exiting compute environments:\s*([\s\S]*)/); + if (!jsonMatch) { + console.error("Raw output:", output); + throw new Error("Could not find compute environments in the output"); + } + + let environments; + try { + environments = eval(jsonMatch[1]); + } catch (error) { + console.error(`Extracted output: ${jsonMatch[0]} and final result: ${jsonMatch[1]}`); + throw new Error("Failed to parse the extracted output:\n" + error); + } + + expect(environments).to.be.an("array").that.is.not.empty; + + const firstEnv = environments[0]; + + expect(firstEnv).to.have.property("id").that.is.a("string"); + expect(firstEnv).to.have.property("consumerAddress").that.is.a("string"); + expect(firstEnv).to.have.property("resources").that.is.an("array"); + + computeEnvId = 
-  it("should start paid compute on compute dataset and algorithm", async function() {
+  it("should start paid compute on compute dataset and algorithm with service ids for dataset and algorithm", async function () {
     const computeEnvs = await ProviderInstance.getComputeEnvironments('http://127.0.0.1:8001');
     const env = computeEnvs[0];
     expect(env).to.be.an('object').and.to.not.be.null.and.to.not.be.undefined;
     resources = []
     const paymentToken = getAddresses().Ocean
-    const output = await runCommand(`npm run cli -- startCompute ${computeDatasetDid} ${jsAlgoDid} ${computeEnvId} 900 ${paymentToken} '${JSON.stringify(resources)}' --accept true`);
+    const serviceIdDataset = 'ccb398c50d6abd5b456e8d7242bd856a1767a890b537c2f8c10ba8b8a10e6025'
+    const serviceIdAlgorithm = 'db164c1b981e4d2974e90e61bda121512e6909c1035c908d68933ae4cfaba6b0'
+    const output = await runCommand(`npm run cli -- startCompute ${computeDatasetDid} ${jsAlgoDid} ${computeEnvId} 900 ${paymentToken} '${JSON.stringify(resources)}' ${serviceIdDataset} ${serviceIdAlgorithm} --accept true`);
 
     const jobIdMatch = output.match(/JobID:\s*([^\s]+)/);
     const agreementIdMatch = output.match(/Agreement ID:\s*([^\s]+)/);
@@ -194,24 +196,24 @@ describe("Ocean CLI Paid Compute", function() {
   }).timeout(10200)
 
   it("should get job status", async () => {
-    const output = await runCommand(`npm run cli getJobStatus ${computeDatasetDid} ${computeJobId} ''`);
-    expect(output).to.contain(computeJobId);
-    expect(output.toLowerCase()).to.match(/status/);
-    console.log(`Job status retrieved for jobId: ${computeJobId}`);
-  });
+    const output = await runCommand(`npm run cli getJobStatus ${computeDatasetDid} ${computeJobId} ''`);
+    expect(output).to.contain(computeJobId);
+    expect(output.toLowerCase()).to.match(/status/);
+    console.log(`Job status retrieved for jobId: ${computeJobId}`);
+  });
 
-  it("should download compute job results", async () => {
-    const destFolder = path.join(projectRoot, "test-results", computeJobId);
-    fs.mkdirSync(destFolder, { recursive: true });
+  it("should download compute job results", async () => {
+    const destFolder = path.join(projectRoot, "test-results", computeJobId);
+    fs.mkdirSync(destFolder, { recursive: true });
 
-    const output = await runCommand(`npm run cli downloadJobResults ${computeJobId} 1 ${destFolder}`);
+    const output = await runCommand(`npm run cli downloadJobResults ${computeJobId} 1 ${destFolder}`);
 
-    expect(output.toLowerCase()).to.match(/download(ed)?/);
+    expect(output.toLowerCase()).to.match(/download(ed)?/);
 
-    const files = fs.readdirSync(destFolder);
-    expect(files.length).to.be.greaterThan(0, "No result files downloaded");
-    console.log(`Downloaded results to: ${destFolder}`);
+    const files = fs.readdirSync(destFolder);
+    expect(files.length).to.be.greaterThan(0, "No result files downloaded");
+    console.log(`Downloaded results to: ${destFolder}`);
     fs.rmSync(path.join(projectRoot, "test-results"), { recursive: true })
-  });
-
+  });
+
 });
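For reference, the renamed test above exercises the new positional service-id arguments; stripped of the test variables, the command it issues has this shape (all values are placeholders):

```
npm run cli -- startCompute <datasetDid> <algoDid> <envId> 900 <paymentTokenAddress> '[]' <datasetServiceId> <algoServiceId> --accept true
```

The two 64-character hex service ids hardcoded in the test are presumably the `services[].id` values of the assets it publishes from the bundled metadata.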
expect(stdout).to.contain("Displays the compute job status"); expect(stdout).to.contain("downloadJobResults [destinationFolder]"); expect(stdout).to.contain("Downloads compute job results");