Skip to content

Commit bb7e8b9

Browse files
feat: index bulks
1 parent 9fa6f2c commit bb7e8b9

File tree

5 files changed

+327
-152
lines changed

5 files changed

+327
-152
lines changed

schema.graphql

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -338,6 +338,24 @@ type Deal @entity {
338338
workerpoolorder: WorkerpoolOrder # todo: not available if not broadcasted
339339
requestorder: RequestOrder # todo: not available if not broadcasted
340340
events: [DealEvent!]! @derivedFrom(field: "deal")
341+
bulk: Bulk
342+
}
343+
344+
"""
A dataset bulk manifest. The same bulk is shared by every deal matched with
the same requestorder, so the entity ID is the requestorder hash.
"""
type Bulk @entity(immutable: true) {
  id: ID!
  "file hash (presumably the IPFS CID of the manifest — confirm against the file data source)"
  hash: String!
  "raw content of the bulk manifest file"
  content: String!
  "slices spawned from the manifest's integer-keyed entries"
  slices: [BulkSlice!] @derivedFrom(field: "bulk")
  "deals referencing this bulk"
  deals: [Deal!]! @derivedFrom(field: "bulk")
}
351+
352+
"""
One slice of a Bulk manifest, indexed from its own file.
The ID combines the parent bulk ID and the slice index.
"""
type BulkSlice @entity(immutable: true) {
  id: ID!
  "file hash (presumably the IPFS CID of the slice file — confirm against the file data source)"
  hash: String!
  "raw content of the slice file"
  content: String!
  "parent bulk manifest"
  bulk: Bulk!
  "position of this slice within the bulk (taken from the manifest entry key)"
  index: BigInt!
  "dataset addresses listed in the slice content; may reference unindexed datasets"
  datasets: [Dataset!]
}
342360

343361
enum TaskStatus {

src/Modules/Bulk.ts

Lines changed: 103 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,103 @@
1+
import {
2+
BigInt,
3+
Bytes,
4+
dataSource,
5+
DataSourceContext,
6+
DataSourceTemplate,
7+
json,
8+
JSONValueKind,
9+
} from '@graphprotocol/graph-ts';
10+
import { Bulk, BulkSlice } from '../../generated/schema';
11+
import { CONTEXT_BULK, CONTEXT_INDEX, CONTEXT_REQUESTHASH, createBulkSliceID } from '../utils';
12+
13+
function isIntegerString(str: string): boolean {
14+
// empty string is not valid
15+
if (str.length == 0) {
16+
return false;
17+
}
18+
// 0 prefixed string is not valid
19+
if (str[0] === '0' && str.length > 1) {
20+
return false;
21+
}
22+
// non numeric character is not valid
23+
for (let i = 0; i < str.length; i++) {
24+
if (!['0', '1', '2', '3', '4', '5', '6', '7', '8', '9'].includes(str[i])) {
25+
return false;
26+
}
27+
}
28+
return true;
29+
}
30+
31+
export function handleBulk(content: Bytes): void {
32+
const hash = dataSource.stringParam();
33+
const context = dataSource.context();
34+
const requestorder = context.getString(CONTEXT_REQUESTHASH);
35+
36+
// multiple deals using same requestorder use the same bulk, we use requestorderHash as bulk ID
37+
const bulkid = requestorder;
38+
39+
let bulk = new Bulk(bulkid);
40+
bulk.hash = hash;
41+
bulk.content = content.toString();
42+
43+
const jsonContent = json.try_fromBytes(content);
44+
if (jsonContent.isOk) {
45+
const contentObject = jsonContent.value.toObject();
46+
47+
for (let i = 0; i < contentObject.entries.length; i++) {
48+
const entry = contentObject.entries[i];
49+
50+
if (isIntegerString(entry.key)) {
51+
const index = BigInt.fromString(entry.key);
52+
if (entry.value.kind == JSONValueKind.OBJECT) {
53+
let sliceCid = entry.value.toObject().getEntry('datasets');
54+
if (sliceCid != null && sliceCid.value.kind == JSONValueKind.STRING) {
55+
let sliceContext = new DataSourceContext();
56+
sliceContext.setString(CONTEXT_BULK, bulkid);
57+
sliceContext.setBigInt(CONTEXT_INDEX, index);
58+
DataSourceTemplate.createWithContext(
59+
'BulkSlice',
60+
[sliceCid.value.toString()],
61+
sliceContext,
62+
);
63+
}
64+
}
65+
}
66+
}
67+
}
68+
69+
bulk.save();
70+
}
71+
72+
export function handleBulkSlice(content: Bytes): void {
73+
const hash = dataSource.stringParam();
74+
const context = dataSource.context();
75+
const bulk = context.getString(CONTEXT_BULK);
76+
const index = context.getBigInt(CONTEXT_INDEX);
77+
78+
let bulkSlice = new BulkSlice(createBulkSliceID(bulk, index));
79+
bulkSlice.hash = hash;
80+
bulkSlice.bulk = bulk;
81+
bulkSlice.index = index;
82+
bulkSlice.content = content.toString();
83+
84+
const jsonContent = json.try_fromBytes(content);
85+
if (jsonContent.isOk && jsonContent.value.kind == JSONValueKind.ARRAY) {
86+
const datasetArray = jsonContent.value.toArray();
87+
for (let i = 0; i < datasetArray.length; i++) {
88+
const dataset = datasetArray[i];
89+
if (dataset.kind == JSONValueKind.STRING) {
90+
let datasetAddress = dataset.toString().toLowerCase();
91+
let datasets = bulkSlice.datasets;
92+
if (datasets == null) {
93+
datasets = new Array<string>();
94+
}
95+
// dataset address may be invalid, this is not an issue, the model will prune it
96+
datasets.push(datasetAddress);
97+
bulkSlice.datasets = datasets;
98+
}
99+
}
100+
}
101+
102+
bulkSlice.save();
103+
}

src/Modules/IexecPoco.ts

Lines changed: 30 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,14 @@
11
// SPDX-FileCopyrightText: 2020-2025 IEXEC BLOCKCHAIN TECH <[email protected]>
22
// SPDX-License-Identifier: Apache-2.0
33

4-
import { Address, BigInt, dataSource } from '@graphprotocol/graph-ts';
4+
import {
5+
Address,
6+
BigInt,
7+
dataSource,
8+
DataSourceContext,
9+
DataSourceTemplate,
10+
json,
11+
} from '@graphprotocol/graph-ts';
512
const chainName = dataSource.network();
613

714
import {
@@ -21,6 +28,7 @@ import {
2128

2229
import {
2330
AccurateContribution,
31+
Bulk,
2432
FaultyContribution,
2533
OrdersMatched,
2634
SchedulerNotice,
@@ -34,6 +42,7 @@ import {
3442
} from '../../generated/schema';
3543

3644
import {
45+
CONTEXT_REQUESTHASH,
3746
createContributionID,
3847
createEventID,
3948
fetchAccount,
@@ -94,6 +103,26 @@ export function handleOrdersMatched(event: OrdersMatchedEvent): void {
94103
deal.timestamp = event.block.timestamp;
95104
deal.save();
96105

106+
// params including bulk_cid reference a dataset bulk
107+
const params = json.try_fromString(viewedDeal.params);
108+
if (params.isOk) {
109+
const bulkCid = params.value.toObject().getEntry('bulk_cid');
110+
if (bulkCid) {
111+
// the same bulk is used by any deal using the same requestorder => we use requestorderHash as bulk ID
112+
const bulkId = event.params.requestHash.toHex();
113+
// create the bulk if not existing yet
114+
const indexedBulk = Bulk.load(bulkId);
115+
if (!indexedBulk) {
116+
let context = new DataSourceContext();
117+
context.setString(CONTEXT_REQUESTHASH, bulkId);
118+
DataSourceTemplate.createWithContext('Bulk', [bulkCid.value.toString()], context);
119+
}
120+
// bulk may not be indexed, this is not an issue, the model will prune it
121+
deal.bulk = bulkId;
122+
deal.save();
123+
}
124+
}
125+
97126
const dataset = deal.dataset;
98127

99128
let apporder = fetchApporder(event.params.appHash.toHex());

0 commit comments

Comments
 (0)