
Commit 3fccdff

Merge pull request #54 from arik-so/2023/08/incremental-update-fix
Only send full updates with announcements
2 parents c41347f + d3028db commit 3fccdff

File tree: 5 files changed, +36 −29 lines

src/config.rs

Lines changed: 9 additions & 2 deletions
@@ -15,7 +15,7 @@ use lightning::util::ser::Readable;
 use lightning_block_sync::http::HttpEndpoint;
 use tokio_postgres::Config;
 
-pub(crate) const SCHEMA_VERSION: i32 = 11;
+pub(crate) const SCHEMA_VERSION: i32 = 12;
 pub(crate) const SNAPSHOT_CALCULATION_INTERVAL: u32 = 3600 * 24; // every 24 hours, in seconds
 /// If the last update in either direction was more than six days ago, we send a reminder
 /// That reminder may be either in the form of a channel announcement, or in the form of empty
@@ -117,11 +117,12 @@ pub(crate) fn db_channel_update_table_creation_query() -> &'static str {
 
 pub(crate) fn db_index_creation_query() -> &'static str {
 	"
-	CREATE INDEX IF NOT EXISTS channel_updates_seen_with_id_direction_blob ON channel_updates(seen) INCLUDE (id, direction, blob_signed);
 	CREATE INDEX IF NOT EXISTS channel_updates_seen_scid ON channel_updates(seen, short_channel_id);
 	CREATE INDEX IF NOT EXISTS channel_updates_scid_dir_seen_asc ON channel_updates(short_channel_id, direction, seen);
 	CREATE INDEX IF NOT EXISTS channel_updates_scid_dir_seen_desc_with_id ON channel_updates(short_channel_id ASC, direction ASC, seen DESC) INCLUDE (id);
 	CREATE UNIQUE INDEX IF NOT EXISTS channel_updates_key ON channel_updates (short_channel_id, direction, timestamp);
+	CREATE INDEX IF NOT EXISTS channel_updates_seen ON channel_updates(seen);
+	CREATE INDEX IF NOT EXISTS channel_updates_timestamp_desc ON channel_updates(timestamp DESC);
 	"
 }
 
@@ -254,6 +255,12 @@ pub(crate) async fn upgrade_db(schema: i32, client: &mut tokio_postgres::Client)
 		tx.execute("UPDATE config SET db_schema = 11 WHERE id = 1", &[]).await.unwrap();
 		tx.commit().await.unwrap();
 	}
+	if schema >= 1 && schema <= 11 {
+		let tx = client.transaction().await.unwrap();
+		tx.execute("DROP INDEX IF EXISTS channel_updates_seen_with_id_direction_blob", &[]).await.unwrap();
+		tx.execute("UPDATE config SET db_schema = 12 WHERE id = 1", &[]).await.unwrap();
+		tx.commit().await.unwrap();
+	}
 	if schema <= 1 || schema > SCHEMA_VERSION {
 		panic!("Unknown schema in db: {}, we support up to {}", schema, SCHEMA_VERSION);
 	}
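For context, every step in upgrade_db has the same shape as the block added here: it is gated on the stored schema number and performs its DDL change plus the db_schema bump inside a single transaction, so a partially applied migration never advances the version. A minimal standalone sketch of that pattern (generic function name, not the crate's actual helper):

use tokio_postgres::Client;

// Hypothetical, generic shape of one migration step, mirroring the schema-12 block above:
// the index drop and the version bump either both commit or both roll back.
async fn run_schema_12_step(client: &mut Client, current_schema: i32) -> Result<(), tokio_postgres::Error> {
	if (1..=11).contains(&current_schema) {
		let tx = client.transaction().await?;
		tx.execute("DROP INDEX IF EXISTS channel_updates_seen_with_id_direction_blob", &[]).await?;
		tx.execute("UPDATE config SET db_schema = 12 WHERE id = 1", &[]).await?;
		tx.commit().await?;
	}
	Ok(())
}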

src/lib.rs

Lines changed: 16 additions & 7 deletions
@@ -20,6 +20,7 @@ use lightning::routing::gossip::{NetworkGraph, NodeId};
 use lightning::util::logger::Logger;
 use lightning::util::ser::{ReadableArgs, Writeable};
 use tokio::sync::mpsc;
+use tokio_postgres::{Client, NoTls};
 use crate::lookup::DeltaSet;
 
 use crate::persistence::GossipPersister;
@@ -110,6 +111,20 @@ impl<L: Deref + Clone + Send + Sync + 'static> RapidSyncProcessor<L> where L::Ta
 	}
 }
 
+pub(crate) async fn connect_to_db() -> Client {
+	let connection_config = config::db_connection_config();
+	let (client, connection) = connection_config.connect(NoTls).await.unwrap();
+
+	tokio::spawn(async move {
+		if let Err(e) = connection.await {
+			panic!("connection error: {}", e);
+		}
+	});
+
+	client.execute("set time zone UTC", &[]).await.unwrap();
+	client
+}
+
 /// This method generates a no-op blob that can be used as a delta where none exists.
 ///
 /// The primary purpose of this method is the scenario of a client retrieving and processing a
@@ -142,16 +157,10 @@ fn serialize_empty_blob(current_timestamp: u64) -> Vec<u8> {
 }
 
 async fn serialize_delta<L: Deref + Clone>(network_graph: Arc<NetworkGraph<L>>, last_sync_timestamp: u32, logger: L) -> SerializedResponse where L::Target: Logger {
-	let (client, connection) = lookup::connect_to_db().await;
+	let client = connect_to_db().await;
 
 	network_graph.remove_stale_channels_and_tracking();
 
-	tokio::spawn(async move {
-		if let Err(e) = connection.await {
-			panic!("connection error: {}", e);
-		}
-	});
-
 	let mut output: Vec<u8> = vec![];
 
 	// set a flag if the chain hash is prepended
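The new crate-level connect_to_db helper centralizes a pattern tokio_postgres requires: Config::connect returns a (Client, Connection) pair, and the Connection is the object that actually drives the socket, so it has to be polled on its own task before the Client is usable. A minimal standalone sketch of that pattern (the connection string is illustrative, not the server's real configuration, which comes from config::db_connection_config()):

use tokio_postgres::NoTls;

#[tokio::main]
async fn main() -> Result<(), tokio_postgres::Error> {
	// illustrative connection string; adjust for a reachable Postgres instance
	let (client, connection) = tokio_postgres::connect("host=localhost user=postgres", NoTls).await?;

	// the Connection performs the actual I/O and must be awaited somewhere,
	// so it gets its own task while the Client stays available for queries
	tokio::spawn(async move {
		if let Err(e) = connection.await {
			eprintln!("connection error: {}", e);
		}
	});

	// same session preparation the helper performs before handing the client back
	client.execute("set time zone UTC", &[]).await?;
	Ok(())
}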

src/lookup.rs

Lines changed: 2 additions & 7 deletions
@@ -7,8 +7,7 @@ use std::time::{Instant, SystemTime, UNIX_EPOCH};
 use lightning::ln::msgs::{ChannelAnnouncement, ChannelUpdate, UnsignedChannelAnnouncement, UnsignedChannelUpdate};
 use lightning::routing::gossip::NetworkGraph;
 use lightning::util::ser::Readable;
-use tokio_postgres::{Client, Connection, NoTls, Socket};
-use tokio_postgres::tls::NoTlsStream;
+use tokio_postgres::Client;
 
 use futures::StreamExt;
 use lightning::log_info;
@@ -68,11 +67,6 @@ impl Default for DirectedUpdateDelta {
 	}
 }
 
-pub(super) async fn connect_to_db() -> (Client, Connection<Socket, NoTlsStream>) {
-	let connection_config = config::db_connection_config();
-	connection_config.connect(NoTls).await.unwrap()
-}
-
 /// Fetch all the channel announcements that are presently in the network graph, regardless of
 /// whether they had been seen before.
 /// Also include all announcements for which the first update was announced
@@ -288,6 +282,7 @@ pub(super) async fn fetch_channel_updates<L: Deref>(delta_set: &mut DeltaSet, cl
 		SELECT id, direction, blob_signed, CAST(EXTRACT('epoch' from seen) AS BIGINT) AS seen
 		FROM channel_updates
 		WHERE seen >= TO_TIMESTAMP($1)
+		ORDER BY timestamp DESC
 		", [last_sync_timestamp_float]).await.unwrap();
 	let mut pinned_updates = Box::pin(intermediate_updates);
 	log_info!(logger, "Fetched intermediate rows in {:?}", start.elapsed());

src/persistence.rs

Lines changed: 6 additions & 10 deletions
@@ -8,7 +8,6 @@ use lightning::routing::gossip::NetworkGraph;
 use lightning::util::logger::Logger;
 use lightning::util::ser::Writeable;
 use tokio::sync::mpsc;
-use tokio_postgres::NoTls;
 
 use crate::config;
 use crate::types::GossipMessage;
@@ -33,15 +32,7 @@ impl<L: Deref> GossipPersister<L> where L::Target: Logger {
 	}
 
 	pub(crate) async fn persist_gossip(&mut self) {
-		let connection_config = config::db_connection_config();
-		let (mut client, connection) =
-			connection_config.connect(NoTls).await.unwrap();
-
-		tokio::spawn(async move {
-			if let Err(e) = connection.await {
-				panic!("connection error: {}", e);
-			}
-		});
+		let mut client = crate::connect_to_db().await;
 
 		{
 			// initialize the database
@@ -57,6 +48,11 @@ impl<L: Deref> GossipPersister<L> where L::Target: Logger {
 			config::upgrade_db(cur_schema[0].get(0), &mut client).await;
 		}
 
+		let preparation = client.execute("set time zone UTC", &[]).await;
+		if let Err(preparation_error) = preparation {
+			panic!("db preparation error: {}", preparation_error);
+		}
+
 		let initialization = client
 			.execute(
 				// TODO: figure out a way to fix the id value without Postgres complaining about

src/serialization.rs

Lines changed: 3 additions & 3 deletions
@@ -140,12 +140,12 @@ pub(super) fn serialize_delta_set(delta_set: DeltaSet, last_sync_timestamp: u32)
 
 		let current_announcement_seen = channel_announcement_delta.seen;
 		let is_new_announcement = current_announcement_seen >= last_sync_timestamp;
-		let is_newly_updated_announcement = if let Some(first_update_seen) = channel_delta.first_bidirectional_updates_seen {
+		let is_newly_included_announcement = if let Some(first_update_seen) = channel_delta.first_bidirectional_updates_seen {
 			first_update_seen >= last_sync_timestamp
 		} else {
 			false
 		};
-		let send_announcement = is_new_announcement || is_newly_updated_announcement;
+		let send_announcement = is_new_announcement || is_newly_included_announcement;
 		if send_announcement {
 			serialization_set.latest_seen = max(serialization_set.latest_seen, current_announcement_seen);
 			serialization_set.announcements.push(channel_delta.announcement.unwrap().announcement);
@@ -166,7 +166,7 @@ pub(super) fn serialize_delta_set(delta_set: DeltaSet, last_sync_timestamp: u32)
 
 		if updates.last_update_before_seen.is_some() {
 			let mutated_properties = updates.mutated_properties;
-			if mutated_properties.len() == 5 {
+			if mutated_properties.len() == 5 || send_announcement {
 				// all five values have changed, it makes more sense to just
 				// serialize the update as a full update instead of as a change
 				// this way, the default values can be computed more efficiently
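This last change is the behavior the commit title refers to: whenever a channel's announcement is included in the delta, its updates are serialized as full updates rather than as incremental changes, since a client only now learning about the channel has no previous update to apply a diff against. A standalone sketch of the resulting rule (simplified names, not the crate's actual types):

/// Simplified sketch of the decision in serialize_delta_set: serialize a channel_update
/// in full either when all five mutable properties changed, or when this delta also
/// carries the channel's announcement.
fn should_serialize_full_update(mutated_property_count: usize, send_announcement: bool) -> bool {
	mutated_property_count == 5 || send_announcement
}

fn main() {
	// incremental update: only some properties changed and no announcement accompanies it
	assert!(!should_serialize_full_update(2, false));
	// full update: the announcement is being sent, so a diff would be meaningless to the client
	assert!(should_serialize_full_update(2, true));
	// full update: every mutable property changed anyway
	assert!(should_serialize_full_update(5, false));
}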
