
Commit a86e38f

integration tests

1 parent 7eaa53c commit a86e38f

File tree

10 files changed: +872 -1 lines changed

coordinator/Makefile

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
-.PHONY: lint docker clean coordinator coordinator_skip_libzkp mock_coordinator
+.PHONY: lint docker clean coordinator coordinator_skip_libzkp mock_coordinator libzkp
 
 IMAGE_VERSION=latest
 REPO_ROOT_DIR=./..

crates/prover-bin/src/main.rs

Lines changed: 31 additions & 0 deletions
@@ -8,6 +8,7 @@ use scroll_proving_sdk::{
     prover::ProverBuilder,
     utils::{get_version, init_tracing},
 };
+use std::{fs::File, path::Path, io::BufReader};
 
 #[derive(Parser, Debug)]
 #[command(disable_version_flag = true)]
@@ -38,6 +39,17 @@ enum Commands {
         /// path to save the verifier's asset
         asset_path: String,
     },
+    Handle {
+        /// path to the JSON file listing the task set to handle
+        task_path: String,
+    },
+}
+
+#[derive(Debug, serde::Deserialize)]
+struct HandleSet {
+    chunks: Vec<String>,
+    batches: Vec<String>,
+    bundles: Vec<String>,
 }
 
 #[tokio::main]
@@ -62,6 +74,25 @@ async fn main() -> eyre::Result<()> {
             println!("dump assets for {fork_name} into {asset_path}");
             local_prover.dump_verifier_assets(&fork_name, asset_path.as_ref())?;
         }
+        Some(Commands::Handle { task_path }) => {
+            let file = File::open(Path::new(&task_path))?;
+            let reader = BufReader::new(file);
+            let handle_set: HandleSet = serde_json::from_reader(reader)?;
+
+            let prover = ProverBuilder::new(sdk_config, local_prover)
+                .build()
+                .await
+                .map_err(|e| eyre::eyre!("failed to build prover: {e}"))?;
+
+            let prover = std::sync::Arc::new(prover);
+            println!("Handling task set 1: chunks ...");
+            prover.clone().one_shot(&handle_set.chunks).await;
+            println!("Done! Handling task set 2: batches ...");
+            prover.clone().one_shot(&handle_set.batches).await;
+            println!("Done! Handling task set 3: bundles ...");
+            prover.clone().one_shot(&handle_set.bundles).await;
+            println!("All done!");
+        }
         None => {
             let prover = ProverBuilder::new(sdk_config, local_prover)
                 .build()
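
For reference, the new Handle subcommand expects task_path to point at a JSON file that deserializes into the HandleSet struct above: three arrays of task identifiers keyed "chunks", "batches", and "bundles". A minimal sketch of such a file (the hash values below are placeholders, not real task identifiers):

{
  "chunks": ["0xaaa...", "0xbbb..."],
  "batches": ["0xccc..."],
  "bundles": ["0xddd..."]
}

With clap's default kebab-case subcommand naming this would be invoked as something like "prover handle tasks.json" (the binary name here is assumed).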
Lines changed: 189 additions & 0 deletions
@@ -0,0 +1,189 @@
+package main
+
+import (
+	"context"
+	"math/rand"
+	"sort"
+
+	"gorm.io/gorm"
+
+	"github.com/scroll-tech/da-codec/encoding"
+	"github.com/scroll-tech/go-ethereum/common"
+	"github.com/scroll-tech/go-ethereum/log"
+
+	"scroll-tech/common/database"
+	"scroll-tech/rollup/internal/orm"
+	"scroll-tech/rollup/internal/utils"
+)
+
+type importRecord struct {
+	Chunk  []string `json:"chunks"`
+	Batch  []string `json:"batches"`
+	Bundle []string `json:"bundles"`
+}
+
+func randomPickKfromN(n, k int, rng *rand.Rand) []int {
+	ret := make([]int, n-1)
+	for i := 1; i < n; i++ {
+		ret[i-1] = i
+	}
+
+	rng.Shuffle(len(ret), func(i, j int) {
+		ret[i], ret[j] = ret[j], ret[i]
+	})
+
+	ret = ret[:k-1]
+	sort.Ints(ret)
+
+	return ret
+}
+
+func importData(ctx context.Context, beginBlk, endBlk uint64, chkNum, batchNum, bundleNum int, seed int64) (*importRecord, error) {
+
+	db, err := database.InitDB(cfg.DBConfig)
+	if err != nil {
+		return nil, err
+	}
+	ret := &importRecord{}
+	// Create a new random source with the provided seed
+	source := rand.NewSource(seed)
+	rng := rand.New(source)
+
+	chkSepIdx := randomPickKfromN(int(endBlk-beginBlk)+1, chkNum, rng)
+	chkSep := make([]uint64, len(chkSepIdx))
+	for i, ind := range chkSepIdx {
+		chkSep[i] = beginBlk + uint64(ind)
+	}
+	chkSep = append(chkSep, endBlk)
+
+	log.Info("separated chunk", "border", chkSep)
+	head := beginBlk
+	lastMsgHash := common.Hash{}
+
+	ormChks := make([]*orm.Chunk, 0, chkNum)
+	encChks := make([]*encoding.Chunk, 0, chkNum)
+	for _, edBlk := range chkSep {
+		ormChk, chk, err := importChunk(ctx, db, head, edBlk-1, lastMsgHash)
+		if err != nil {
+			return nil, err
+		}
+		lastMsgHash = chk.PostL1MessageQueueHash
+		ormChks = append(ormChks, ormChk)
+		encChks = append(encChks, chk)
+		head = edBlk
+	}
+
+	for _, chk := range ormChks {
+		ret.Chunk = append(ret.Chunk, chk.Hash)
+	}
+
+	batchSep := randomPickKfromN(chkNum, batchNum, rng)
+	batchSep = append(batchSep, chkNum) // closing boundary: the last batch must end at the final chunk
+	log.Info("separated batch", "border", batchSep)
+
+	headChk := 0
+	batches := make([]*orm.Batch, 0, batchNum)
+	var lastBatch *orm.Batch
+	for _, endChk := range batchSep {
+		batch, err := importBatch(ctx, db, ormChks[headChk:endChk], encChks[headChk:endChk], lastBatch)
+		if err != nil {
+			return nil, err
+		}
+		lastBatch = batch
+		batches = append(batches, batch)
+		headChk = endChk
+	}
+
+	for _, batch := range batches {
+		ret.Batch = append(ret.Batch, batch.Hash)
+	}
+
+	bundleSep := randomPickKfromN(batchNum, bundleNum, rng)
+	bundleSep = append(bundleSep, batchNum) // closing boundary: the last bundle must end at the final batch
+	log.Info("separated bundle", "border", bundleSep)
+
+	headBatch := 0
+	for _, endBatch := range bundleSep {
+		hash, err := importBundle(ctx, db, batches[headBatch:endBatch])
+		if err != nil {
+			return nil, err
+		}
+		ret.Bundle = append(ret.Bundle, hash)
+		headBatch = endBatch
+	}
+
+	return ret, nil
+}
+
+func importChunk(ctx context.Context, db *gorm.DB, beginBlk, endBlk uint64, prevMsgQueueHash common.Hash) (*orm.Chunk, *encoding.Chunk, error) {
+	nblk := int(endBlk-beginBlk) + 1
+	blockOrm := orm.NewL2Block(db)
+
+	blks, err := blockOrm.GetL2BlocksGEHeight(ctx, beginBlk, nblk)
+
+	if err != nil {
+		return nil, nil, err
+	}
+
+	postHash, err := encoding.MessageQueueV2ApplyL1MessagesFromBlocks(prevMsgQueueHash, blks)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	theChunk := &encoding.Chunk{
+		Blocks:                 blks,
+		PrevL1MessageQueueHash: prevMsgQueueHash,
+		PostL1MessageQueueHash: postHash,
+	}
+	chunkOrm := orm.NewChunk(db)
+
+	dbChk, err := chunkOrm.InsertChunk(ctx, theChunk, codecCfg, utils.ChunkMetrics{})
+	if err != nil {
+		return nil, nil, err
+	}
+	log.Info("insert chunk", "From", beginBlk, "To", endBlk)
+	return dbChk, theChunk, nil
+}
+
+func importBatch(ctx context.Context, db *gorm.DB, chks []*orm.Chunk, encChks []*encoding.Chunk, last *orm.Batch) (*orm.Batch, error) {
+
+	batchOrm := orm.NewBatch(db)
+
+	var index uint64
+	var parentHash common.Hash
+	if last != nil {
+		index = last.Index + 1
+		parentHash = common.HexToHash(last.Hash)
+	}
+
+	var blks []*encoding.Block
+	for _, chk := range encChks {
+		blks = append(blks, chk.Blocks...)
+	}
+
+	batch := &encoding.Batch{
+		Index:                      index,
+		TotalL1MessagePoppedBefore: chks[0].TotalL1MessagesPoppedBefore,
+		ParentBatchHash:            parentHash,
+		Chunks:                     encChks,
+		Blocks:                     blks,
+	}
+
+	dbBatch, err := batchOrm.InsertBatch(ctx, batch, codecCfg, utils.BatchMetrics{})
+	if err != nil {
+		return nil, err
+	}
+	log.Info("insert batch", "index", index)
+	return dbBatch, nil
+}
+
+func importBundle(ctx context.Context, db *gorm.DB, batches []*orm.Batch) (string, error) {
+
+	bundleOrm := orm.NewBundle(db)
+	bundle, err := bundleOrm.InsertBundle(ctx, batches, codecCfg)
+	if err != nil {
+		return "", err
+	}
+	log.Info("insert bundle", "hash", bundle.Hash)
+	return bundle.Hash, nil
+}
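
Taken together, importData splits the block range into chkNum chunks at random borders, groups those chunks into batchNum batches and the batches into bundleNum bundles, and returns the hashes of everything it inserted; randomPickKfromN(n, k, rng) supplies the interior borders by returning k-1 distinct ascending indices from 1..n-1, and the caller appends a closing boundary so the last group covers the remaining items. A minimal sketch of how a test driver might call it (the block range, counts, and seed are illustrative; cfg and codecCfg are assumed to be package-level variables initialized elsewhere in this tool):

// Hypothetical driver: import blocks 100..199 as 8 chunks, 4 batches,
// and 2 bundles. A fixed seed makes the random chunk/batch/bundle
// borders reproducible across test runs.
func runImport(ctx context.Context) error {
	rec, err := importData(ctx, 100, 199, 8, 4, 2, 42)
	if err != nil {
		return err
	}
	log.Info("imported", "chunks", len(rec.Chunk), "batches", len(rec.Batch), "bundles", len(rec.Bundle))
	return nil
}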

0 commit comments
