Skip to content

Commit 4426d98

Browse files
committed
feat(pyth-lazer-agent) Allow deduplicating updates within each batch
1 parent b9ad8f2 commit 4426d98

File tree

7 files changed

+104
-6
lines changed

7 files changed

+104
-6
lines changed

Cargo.lock

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

apps/pyth-lazer-agent/Cargo.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[package]
22
name = "pyth-lazer-agent"
3-
version = "0.4.0"
3+
version = "0.4.1"
44
edition = "2024"
55
description = "Pyth Lazer Agent"
66
license = "Apache-2.0"

apps/pyth-lazer-agent/README.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -49,10 +49,12 @@ publish_keypair_path = "/path/to/keypair.json"
4949
authorization_token = "your_token"
5050
listen_address = "0.0.0.0:8910"
5151
publish_interval_duration = "25ms"
52+
enable_update_deduplication = false
5253
```
5354

5455
- `relayers_urls`: The Lazer team will provide these.
5556
- `publish_keypair_path`: The keypair file generated with `solana-keygen` or similar.
5657
- `authorization_token`: The Lazer team will provide this or instruct that it can be omitted.
5758
- `listen_address`: The local port the agent will be listening on; can be anything you want.
5859
- `publish_interval_duration`: The agent will batch and send transaction bundles at this interval. The Lazer team will provide guidance here.
60+
- `enable_update_deduplication`: The agent will deduplicate identical consecutive updates within each batch before sending the batch to Lazer.
Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
1-
relayer_urls = ["wss://relayer.pyth-lazer-staging.dourolabs.app/v1/transaction", "wss://relayer-1.pyth-lazer-staging.dourolabs.app/v1/transaction"]
2-
publish_keypair_path = "/path/to/solana/id.json"
1+
relayer_urls = ["ws://localhost:10001/v1/transaction"]
2+
publish_keypair_path = "/tmp/keypair.json"
33
listen_address = "0.0.0.0:8910"
44
publish_interval_duration = "25ms"
55
authorization_token="token1"

apps/pyth-lazer-agent/src/config.rs

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,8 @@ pub struct Config {
1919
#[serde(with = "humantime_serde", default = "default_publish_interval")]
2020
pub publish_interval_duration: Duration,
2121
pub history_service_url: Option<Url>,
22+
#[serde(default)]
23+
pub enable_update_deduplication: bool
2224
}
2325

2426
#[derive(Deserialize, Derivative, Clone, PartialEq)]

apps/pyth-lazer-agent/src/jrpc_handle.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -299,6 +299,7 @@ pub mod tests {
299299
publish_keypair_path: Default::default(),
300300
publish_interval_duration: Default::default(),
301301
history_service_url: None,
302+
enable_update_deduplication: false,
302303
};
303304

304305
println!("{:?}", get_metadata(config).await.unwrap());

apps/pyth-lazer-agent/src/lazer_publisher.rs

Lines changed: 95 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
1+
use std::collections::HashMap;
12
use crate::config::{CHANNEL_CAPACITY, Config};
23
use crate::relayer_session::RelayerSessionTask;
34
use anyhow::{Context, Result, bail};
@@ -132,8 +133,14 @@ impl LazerPublisherTask {
132133
return Ok(());
133134
}
134135

136+
let updates = if self.config.enable_update_deduplication {
137+
deduplicate_feed_updates(&self.pending_updates.drain(..).collect())?
138+
} else {
139+
self.pending_updates.drain(..).collect()
140+
};
141+
135142
let publisher_update = PublisherUpdate {
136-
updates: self.pending_updates.drain(..).collect(),
143+
updates,
137144
publisher_timestamp: MessageField::some(Timestamp::now()),
138145
special_fields: Default::default(),
139146
};
@@ -173,10 +180,33 @@ impl LazerPublisherTask {
173180
}
174181
}
175182

183+
fn deduplicate_feed_updates(feed_updates: &Vec<FeedUpdate>) -> Result<Vec<FeedUpdate>> {
184+
let mut deduped_feed_updates = Vec::new();
185+
let mut last_feed_update = HashMap::new();
186+
187+
// assume that feed_updates is already sorted by ts (within feed_update_id groups)
188+
for feed_update in feed_updates {
189+
let feed_id = feed_update.feed_id.context("feed_id is required")?;
190+
191+
if let Some(update) = feed_update.update.as_ref() {
192+
if let Some(last_update) = last_feed_update.get(&feed_id) {
193+
if update == last_update {
194+
continue;
195+
}
196+
}
197+
198+
deduped_feed_updates.push(feed_update.clone());
199+
last_feed_update.insert(feed_id, update.clone());
200+
}
201+
}
202+
203+
Ok(deduped_feed_updates)
204+
}
205+
176206
#[cfg(test)]
177207
mod tests {
178208
use crate::config::{CHANNEL_CAPACITY, Config};
179-
use crate::lazer_publisher::LazerPublisherTask;
209+
use crate::lazer_publisher::{deduplicate_feed_updates, LazerPublisherTask};
180210
use ed25519_dalek::SigningKey;
181211
use protobuf::well_known_types::timestamp::Timestamp;
182212
use protobuf::{Message, MessageField};
@@ -186,6 +216,7 @@ mod tests {
186216
use std::io::Write;
187217
use std::path::PathBuf;
188218
use std::time::Duration;
219+
use pyth_lazer_protocol::time::TimestampUs;
189220
use tempfile::NamedTempFile;
190221
use tokio::sync::broadcast::error::TryRecvError;
191222
use tokio::sync::{broadcast, mpsc};
@@ -212,6 +243,18 @@ mod tests {
212243
temp_file
213244
}
214245

246+
fn test_feed_update(feed_id: u32, timestamp: TimestampUs, price: i64) -> FeedUpdate {
247+
FeedUpdate {
248+
feed_id: Some(feed_id),
249+
source_timestamp: MessageField::some(timestamp.into()),
250+
update: Some(Update::PriceUpdate(PriceUpdate {
251+
price: Some(price),
252+
..PriceUpdate::default()
253+
})),
254+
special_fields: Default::default(),
255+
}
256+
}
257+
215258
#[tokio::test]
216259
async fn test_lazer_exporter_task() {
217260
let signing_key_file = get_private_key_file();
@@ -224,6 +267,7 @@ mod tests {
224267
publish_keypair_path: PathBuf::from(signing_key_file.path()),
225268
publish_interval_duration: Duration::from_millis(25),
226269
history_service_url: None,
270+
enable_update_deduplication: false,
227271
};
228272

229273
let (relayer_sender, mut relayer_receiver) = broadcast::channel(CHANNEL_CAPACITY);
@@ -274,4 +318,53 @@ mod tests {
274318
_ => panic!("channel should have a transaction waiting"),
275319
}
276320
}
321+
322+
#[test]
323+
fn test_deduplicate_feed_updates() {
324+
// let's consider a batch containing updates for a single feed. the updates are (ts, price):
325+
// - (1, 10)
326+
// - (2, 10)
327+
// - (3, 10)
328+
// - (4, 15)
329+
// - (5, 15)
330+
// - (6, 10)
331+
// we should only return (1, 10), (4, 15), (6, 10)
332+
333+
let updates = vec![
334+
test_feed_update(1, TimestampUs::from_millis(1).unwrap(), 10),
335+
test_feed_update(1, TimestampUs::from_millis(2).unwrap(), 10),
336+
test_feed_update(1, TimestampUs::from_millis(3).unwrap(), 10),
337+
test_feed_update(1, TimestampUs::from_millis(4).unwrap(), 15),
338+
test_feed_update(1, TimestampUs::from_millis(5).unwrap(), 15),
339+
test_feed_update(1, TimestampUs::from_millis(6).unwrap(), 10),
340+
];
341+
342+
let expected_updates = vec![
343+
test_feed_update(1, TimestampUs::from_millis(1).unwrap(), 10),
344+
test_feed_update(1, TimestampUs::from_millis(4).unwrap(), 15),
345+
test_feed_update(1, TimestampUs::from_millis(6).unwrap(), 10),
346+
];
347+
348+
assert_eq!(deduplicate_feed_updates(&updates).unwrap(), expected_updates);
349+
}
350+
351+
#[test]
352+
fn test_deduplicate_feed_updates_multiple_feeds() {
353+
let updates = vec![
354+
test_feed_update(1, TimestampUs::from_millis(1).unwrap(), 10),
355+
test_feed_update(1, TimestampUs::from_millis(2).unwrap(), 10),
356+
test_feed_update(1, TimestampUs::from_millis(3).unwrap(), 10),
357+
test_feed_update(2, TimestampUs::from_millis(4).unwrap(), 15),
358+
test_feed_update(2, TimestampUs::from_millis(5).unwrap(), 15),
359+
test_feed_update(2, TimestampUs::from_millis(6).unwrap(), 10),
360+
];
361+
362+
let expected_updates = vec![
363+
test_feed_update(1, TimestampUs::from_millis(1).unwrap(), 10),
364+
test_feed_update(2, TimestampUs::from_millis(4).unwrap(), 15),
365+
test_feed_update(2, TimestampUs::from_millis(6).unwrap(), 10),
366+
];
367+
368+
assert_eq!(deduplicate_feed_updates(&updates).unwrap(), expected_updates);
369+
}
277370
}

0 commit comments

Comments
 (0)