
Commit a538b9e

mangasneysofu authored and committed
fix false positive POI (#3951)
1 parent 9e03a50 commit a538b9e

10 files changed (+225, -17 lines)

core/src/subgraph/trigger_processor.rs

Lines changed: 16 additions & 6 deletions

@@ -3,7 +3,7 @@ use graph::blockchain::Blockchain;
 use graph::cheap_clone::CheapClone;
 use graph::components::store::SubgraphFork;
 use graph::components::subgraph::{MappingError, SharedProofOfIndexing};
-use graph::data_source::TriggerData;
+use graph::data_source::{MappingTrigger, TriggerData, TriggerWithHandler};
 use graph::prelude::tokio::time::Instant;
 use graph::prelude::{
     BlockState, RuntimeHost, RuntimeHostBuilder, SubgraphInstanceMetrics, TriggerProcessor,
@@ -33,11 +33,7 @@ where
     ) -> Result<BlockState<C>, MappingError> {
         let error_count = state.deterministic_errors.len();
 
-        if let Some(proof_of_indexing) = proof_of_indexing {
-            proof_of_indexing
-                .borrow_mut()
-                .start_handler(causality_region);
-        }
+        let mut host_mapping: Vec<(&T::Host, TriggerWithHandler<MappingTrigger<C>>)> = vec![];
 
         for host in hosts {
             let mapping_trigger = match host.match_and_decode(trigger, block, logger)? {
@@ -48,6 +44,20 @@ where
                 None => continue,
             };
 
+            host_mapping.push((&host, mapping_trigger));
+        }
+
+        if host_mapping.is_empty() {
+            return Ok(state);
+        }
+
+        if let Some(proof_of_indexing) = proof_of_indexing {
+            proof_of_indexing
+                .borrow_mut()
+                .start_handler(causality_region);
+        }
+
+        for (host, mapping_trigger) in host_mapping {
             let start = Instant::now();
             state = host
                 .process_mapping_trigger(
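
Why the ordering matters: with experimental static filters enabled, the block
stream can deliver events that match a template's event signature but belong to
no instantiated data source. Every host then returns None from
match_and_decode, yet the old code had already called start_handler, so the
false positive still altered the proof of indexing. Below is a self-contained
toy model of the fixed control flow; Host, Poi, and the string trigger are
simplified stand-ins, not graph-node types.

// Toy model of the fix (assumed simplifications, not graph-node code).
#[derive(Default)]
struct Poi {
    handlers_started: usize,
}

impl Poi {
    fn start_handler(&mut self) {
        self.handlers_started += 1;
    }
}

struct Host {
    matches: bool,
}

impl Host {
    // Stand-in for DataSource::match_and_decode: None means the trigger
    // is a false positive for this host and must be discarded.
    fn match_and_decode(&self, trigger: &str) -> Option<String> {
        self.matches.then(|| trigger.to_string())
    }
}

// New ordering: decode first, start the PoI handler only if something matched.
fn process_trigger(hosts: &[Host], trigger: &str, poi: &mut Poi) {
    let matched: Vec<String> = hosts
        .iter()
        .filter_map(|h| h.match_and_decode(trigger))
        .collect();

    if matched.is_empty() {
        // False positive: leave the PoI untouched.
        return;
    }

    poi.start_handler();
    for _mapping_trigger in matched {
        // ... run the handler against the decoded trigger ...
    }
}

fn main() {
    // No host matches, as with a static-filter false positive.
    let hosts = [Host { matches: false }];
    let mut poi = Poi::default();
    process_trigger(&hosts, "Trigger(uint16)", &mut poi);
    // The old ordering would have recorded 1 here, diverging the PoI.
    assert_eq!(poi.handlers_started, 0);
}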

store/postgres/src/subgraph_store.rs

Lines changed: 1 addition & 1 deletion

@@ -228,7 +228,7 @@ impl SubgraphStore {
         }
     }
 
-    pub(crate) async fn get_proof_of_indexing(
+    pub async fn get_proof_of_indexing(
         &self,
         id: &DeploymentHash,
         indexer: &Option<Address>,
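
Widening the visibility from pub(crate) to pub lets code outside the store
crate, such as the new runner test in this commit, read a deployment's PoI
digest directly. A minimal hedged sketch of a call site; store (a
SubgraphStore), deployment_hash, and block_ptr are assumed to be in scope:

// `&None` asks for the PoI without an indexer-specific address; the
// result, per the test below, is an optional 32-byte digest.
let poi: Option<[u8; 32]> = store
    .get_proof_of_indexing(&deployment_hash, &None, block_ptr)
    .await
    .unwrap();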

tests/integration-tests/dynamic-data-source/abis/Contract.abi

Lines changed: 33 additions & 0 deletions

@@ -0,0 +1,33 @@
+[
+  {
+    "inputs": [],
+    "stateMutability": "nonpayable",
+    "type": "constructor"
+  },
+  {
+    "anonymous": false,
+    "inputs": [
+      {
+        "indexed": false,
+        "internalType": "uint16",
+        "name": "x",
+        "type": "uint16"
+      }
+    ],
+    "name": "Trigger",
+    "type": "event"
+  },
+  {
+    "inputs": [
+      {
+        "internalType": "uint16",
+        "name": "x",
+        "type": "uint16"
+      }
+    ],
+    "name": "emitTrigger",
+    "outputs": [],
+    "stateMutability": "nonpayable",
+    "type": "function"
+  }
+]

tests/integration-tests/dynamic-data-source/package.json

Lines changed: 25 additions & 0 deletions

@@ -0,0 +1,25 @@
+{
+  "name": "dynamic-data-source",
+  "version": "0.1.0",
+  "scripts": {
+    "build-contracts": "../common/build-contracts.sh",
+    "codegen": "graph codegen",
+    "test": "yarn build-contracts && truffle test --compile-none --network test",
+    "create:test": "graph create test/dynamic-data-source --node $GRAPH_NODE_ADMIN_URI",
+    "deploy:test": "graph deploy test/dynamic-data-source --version-label v0.0.1 --ipfs $IPFS_URI --node $GRAPH_NODE_ADMIN_URI"
+  },
+  "devDependencies": {
+    "@graphprotocol/graph-cli": "https://github.com/graphprotocol/graph-cli#main",
+    "@graphprotocol/graph-ts": "https://github.com/graphprotocol/graph-ts#main",
+    "solc": "^0.8.2"
+  },
+  "dependencies": {
+    "@truffle/contract": "^4.3",
+    "@truffle/hdwallet-provider": "^1.2",
+    "apollo-fetch": "^0.7.0",
+    "babel-polyfill": "^6.26.0",
+    "babel-register": "^6.26.0",
+    "gluegun": "^4.6.1",
+    "truffle": "^5.2"
+  }
+}

tests/integration-tests/dynamic-data-source/schema.graphql

Lines changed: 4 additions & 0 deletions

@@ -0,0 +1,4 @@
+type Foo @entity {
+  id: ID!
+  value: String!
+}

tests/integration-tests/dynamic-data-source/src/mapping.ts

Lines changed: 16 additions & 0 deletions

@@ -0,0 +1,16 @@
+import { Trigger } from "../generated/Contract/Contract";
+import { Foo } from "../generated/schema";
+
+
+export function handleTrigger(event: Trigger): void {
+  let id = `${event.block.hash.toHexString()}${event.address.toHexString()}`;
+  let foo = new Foo(id);
+  foo.save();
+}
+
+
+
+
+
+
+

tests/integration-tests/dynamic-data-source/subgraph.yaml

Lines changed: 42 additions & 0 deletions

@@ -0,0 +1,42 @@
+specVersion: 0.0.4
+schema:
+  file: ./schema.graphql
+dataSources:
+  - kind: ethereum/contract
+    name: Contract
+    network: test
+    source:
+      address: "0xCfEB869F69431e42cdB54A4F4f105C19C080A601"
+      abi: Contract
+    mapping:
+      kind: ethereum/events
+      apiVersion: 0.0.6
+      language: wasm/assemblyscript
+      abis:
+        - name: Contract
+          file: ./abis/Contract.abi
+      entities:
+        - Call
+      eventHandlers:
+        - event: Trigger(uint16)
+          handler: handleTrigger
+      file: ./src/mapping.ts
+templates:
+  - kind: ethereum/contract
+    name: Dynamic
+    network: test
+    source:
+      abi: Contract
+    mapping:
+      kind: ethereum/events
+      apiVersion: 0.0.6
+      language: wasm/assemblyscript
+      abis:
+        - name: Contract
+          file: ./abis/Contract.abi
+      entities:
+        - Call
+      eventHandlers:
+        - event: Trigger(uint16)
+          handler: handleTrigger
+      file: ./src/mapping.ts

tests/integration-tests/package.json

Lines changed: 1 addition & 0 deletions

@@ -12,6 +12,7 @@
     "remove-then-update",
     "typename",
     "value-roundtrip",
+    "dynamic-data-source",
     "file-data-sources"
   ]
 }

tests/src/fixture.rs

Lines changed: 13 additions & 6 deletions

@@ -20,14 +20,15 @@ use graph::cheap_clone::CheapClone;
 use graph::components::store::{BlockStore, DeploymentLocator};
 use graph::data::graphql::effort::LoadManager;
 use graph::data::query::{Query, QueryTarget};
-use graph::env::ENV_VARS;
+use graph::env::EnvVars;
 use graph::ipfs_client::IpfsClient;
 use graph::prelude::ethabi::ethereum_types::H256;
 use graph::prelude::{
     async_trait, r, ApiVersion, BlockNumber, DeploymentHash, GraphQlRunner as _, LoggerFactory,
     MetricsRegistry, NodeId, QueryError, SubgraphAssignmentProvider, SubgraphName,
     SubgraphRegistrar, SubgraphStore as _, SubgraphVersionSwitchingMode,
 };
+use graph::slog::crit;
 use graph_core::polling_monitor::ipfs_service::IpfsService;
 use graph_core::{
     LinkResolver, SubgraphAssignmentProvider as IpfsSubgraphAssignmentProvider,
@@ -38,7 +39,7 @@ use graph_mock::MockMetricsRegistry;
 use graph_node::manager::PanicSubscriptionManager;
 use graph_node::{config::Config, store_builder::StoreBuilder};
 use graph_store_postgres::{ChainHeadUpdateListener, ChainStore, Store, SubgraphStore};
-use slog::{crit, info, Logger};
+use slog::{info, Logger};
 use std::env::VarError;
 use std::pin::Pin;
 use std::sync::Arc;
@@ -216,7 +217,13 @@ pub async fn setup<C: Blockchain>(
     stores: &Stores,
     chain: Arc<C>,
     graft_block: Option<BlockPtr>,
+    env_vars: Option<EnvVars>,
 ) -> TestContext {
+    let env_vars = match env_vars {
+        Some(ev) => ev,
+        None => EnvVars::from_env().unwrap(),
+    };
+
     let logger = graph::log::logger(true);
     let logger_factory = LoggerFactory::new(logger.clone(), None);
     let mock_registry: Arc<dyn MetricsRegistry> = Arc::new(MockMetricsRegistry::new());
@@ -229,7 +236,7 @@ pub async fn setup<C: Blockchain>(
     let mut blockchain_map = BlockchainMap::new();
     blockchain_map.insert(stores.network_name.clone(), chain);
 
-    let static_filters = ENV_VARS.experimental_static_filters;
+    let static_filters = env_vars.experimental_static_filters;
 
     let ipfs = IpfsClient::localhost();
     let link_resolver = Arc::new(LinkResolver::new(
@@ -238,9 +245,9 @@ pub async fn setup<C: Blockchain>(
     ));
     let ipfs_service = IpfsService::new(
         ipfs,
-        ENV_VARS.mappings.max_ipfs_file_bytes as u64,
-        ENV_VARS.mappings.ipfs_timeout,
-        ENV_VARS.mappings.max_ipfs_concurrent_requests,
+        env_vars.mappings.max_ipfs_file_bytes as u64,
+        env_vars.mappings.ipfs_timeout,
+        env_vars.mappings.max_ipfs_concurrent_requests,
     );
 
     let blockchain_map = Arc::new(blockchain_map);
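
The new env_vars parameter lets a test inject its own EnvVars instead of the
global ENV_VARS; passing None preserves the old behavior of reading the
process environment. A hedged sketch of the injection pattern, mirroring the
new runner test below (subgraph_name, hash, stores, and chain assumed in
scope):

// Enable static filters for this test only, without mutating the
// process environment or the global ENV_VARS.
let mut env_vars = EnvVars::default();
env_vars.experimental_static_filters = true;

let ctx = fixture::setup(
    subgraph_name.clone(),
    &hash,
    &stores,
    chain,
    None,            // no graft block
    Some(env_vars),  // per-test environment overrides
)
.await;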

tests/tests/runner.rs

Lines changed: 74 additions & 4 deletions

@@ -2,6 +2,7 @@ use std::sync::Arc;
 
 use cid::Cid;
 use graph::blockchain::{Block, BlockPtr};
+use graph::env::EnvVars;
 use graph::object;
 use graph::prelude::ethabi::ethereum_types::H256;
 use graph::prelude::{SubgraphAssignmentProvider, SubgraphName};
@@ -33,7 +34,15 @@ async fn data_source_revert() -> anyhow::Result<()> {
     };
 
     let chain = Arc::new(chain(blocks.clone(), &stores).await);
-    let ctx = fixture::setup(subgraph_name.clone(), &hash, &stores, chain.clone(), None).await;
+    let ctx = fixture::setup(
+        subgraph_name.clone(),
+        &hash,
+        &stores,
+        chain.clone(),
+        None,
+        None,
+    )
+    .await;
 
     let stop_block = test_ptr(2);
     ctx.start_and_sync_to(stop_block).await;
@@ -51,7 +60,15 @@ async fn data_source_revert() -> anyhow::Result<()> {
     )
     .await;
     let graft_block = Some(test_ptr(3));
-    let ctx = fixture::setup(subgraph_name.clone(), &hash, &stores, chain, graft_block).await;
+    let ctx = fixture::setup(
+        subgraph_name.clone(),
+        &hash,
+        &stores,
+        chain,
+        graft_block,
+        None,
+    )
+    .await;
     let stop_block = test_ptr(4);
     ctx.start_and_sync_to(stop_block).await;
 
@@ -97,7 +114,7 @@ async fn typename() -> anyhow::Result<()> {
 
     let stores = stores("./integration-tests/config.simple.toml").await;
     let chain = Arc::new(chain(blocks, &stores).await);
-    let ctx = fixture::setup(subgraph_name.clone(), &hash, &stores, chain, None).await;
+    let ctx = fixture::setup(subgraph_name.clone(), &hash, &stores, chain, None, None).await;
 
     ctx.start_and_sync_to(stop_block).await;
 
@@ -122,7 +139,7 @@ async fn file_data_sources() {
     };
     let stop_block = test_ptr(1);
     let chain = Arc::new(chain(blocks, &stores).await);
-    let ctx = fixture::setup(subgraph_name.clone(), &hash, &stores, chain, None).await;
+    let ctx = fixture::setup(subgraph_name.clone(), &hash, &stores, chain, None, None).await;
     ctx.start_and_sync_to(stop_block).await;
 
     // CID QmVkvoPGi9jvvuxsHDVJDgzPEzagBaWSZRYoRDzU244HjZ is the file
@@ -151,3 +168,56 @@ async fn file_data_sources() {
     let stop_block = test_ptr(2);
     ctx.start_and_sync_to(stop_block).await;
 }
+
+#[tokio::test]
+async fn template_static_filters_false_positives() {
+    let stores = stores("./integration-tests/config.simple.toml").await;
+
+    let subgraph_name = SubgraphName::new("dynamic-data-source").unwrap();
+    let hash = {
+        let test_dir = format!("./integration-tests/{}", subgraph_name);
+        fixture::build_subgraph(&test_dir).await
+    };
+
+    let blocks = {
+        let block_0 = genesis();
+        let block_1 = empty_block(block_0.ptr(), test_ptr(1));
+        let block_2 = empty_block(block_1.ptr(), test_ptr(2));
+        vec![block_0, block_1, block_2]
+    };
+    let stop_block = test_ptr(1);
+    let chain = Arc::new(chain(blocks, &stores).await);
+
+    let mut env_vars = EnvVars::default();
+    env_vars.experimental_static_filters = true;
+
+    let ctx = fixture::setup(
+        subgraph_name.clone(),
+        &hash,
+        &stores,
+        chain,
+        None,
+        Some(env_vars),
+    )
+    .await;
+    ctx.start_and_sync_to(stop_block).await;
+
+    let poi = ctx
+        .store
+        .get_proof_of_indexing(&ctx.deployment.hash, &None, test_ptr(1))
+        .await
+        .unwrap();
+
+    // This check exists to prevent regression of https://github.com/graphprotocol/graph-node/issues/3963:
+    // when false positives go through the block stream, they should be discarded by
+    // `DataSource::match_and_decode`. The POI below is generated consistently from the empty
+    // POI table. If this fails, it is likely that either the bug was re-introduced or there is
+    // a change in the POI infrastructure.
+    assert_eq!(
+        poi.unwrap(),
+        [
+            196, 173, 167, 52, 226, 19, 154, 61, 189, 94, 19, 229, 18, 7, 0, 252, 234, 49, 110,
+            179, 105, 64, 16, 46, 25, 194, 83, 94, 195, 225, 56, 252
+        ],
+    );
+}
