1 change: 1 addition & 0 deletions lightning-background-processor/Cargo.toml
@@ -11,6 +11,7 @@ edition = "2021"

 [package.metadata.docs.rs]
 all-features = true
+features = ["lightning/std"]
Contributor: Mhh, does this actually work or do we also need to set it on RGS and the BP? Do we know of any way to validate this without running docs.rs in a container etc?

Collaborator Author: […]

Contributor: Seems broken?

Collaborator Author: Yep, looks like it works.

Contributor: "The requested version does not exist"?

Collaborator Author: Oh, I guess when I yanked the crate it deleted it... it did work, though.

Contributor: Mhh, previously I got another page that still looked broken (links not working etc.). In any case, fingers crossed it will look unbroken on docs.rs.

 rustdoc-args = ["--cfg", "docsrs"]

 [features]
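A note on the metadata above, since the thread asks how to validate it: docs.rs reads `[package.metadata.docs.rs]` at build time, and `rustdoc-args = ["--cfg", "docsrs"]` additionally sets a `docsrs` cfg that the crate's source can test. One common way to approximate the docs.rs build locally, without running docs.rs in a container, is `RUSTDOCFLAGS="--cfg docsrs" cargo +nightly doc --all-features`. The snippet below is a generic illustration of how such a cfg is typically consumed in a crate root; it is not claimed to be this crate's exact usage:

```rust
// Generic illustration (crate-root lib.rs) of consuming the `docsrs` cfg
// set via `rustdoc-args = ["--cfg", "docsrs"]`.
//
// On docs.rs (nightly rustdoc), enable automatic "Available on crate
// feature `std` only" banners; stable builds skip this attribute entirely.
#![cfg_attr(docsrs, feature(doc_auto_cfg))]

/// A feature-gated item: with `doc_auto_cfg` active, rustdoc annotates it
/// with its `std` feature requirement automatically.
#[cfg(feature = "std")]
pub fn std_only() {}
```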
2 changes: 0 additions & 2 deletions lightning-block-sync/src/init.rs
@@ -235,8 +235,6 @@ impl<'a, L: chain::Listen + ?Sized> chain::Listen for DynamicChainListener<'a, L
 struct ChainListenerSet<'a, L: chain::Listen + ?Sized>(Vec<(u32, &'a L)>);

 impl<'a, L: chain::Listen + ?Sized> chain::Listen for ChainListenerSet<'a, L> {
-	// Needed to differentiate test expectations.
-	#[cfg(test)]
 	fn block_connected(&self, block: &bitcoin::Block, height: u32) {
 		for (starting_height, chain_listener) in self.0.iter() {
 			if height > *starting_height {
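For context on the deletion above: with `#[cfg(test)]` removed, `block_connected` is now compiled into all builds rather than only under test. The loop body is truncated in this diff; the sketch below fills it in with the obvious dispatch logic and is an assumption about the elided code, not a quote of it:

```rust
use lightning::chain;

struct ChainListenerSet<'a, L: chain::Listen + ?Sized>(Vec<(u32, &'a L)>);

impl<'a, L: chain::Listen + ?Sized> ChainListenerSet<'a, L> {
	// Assumed body of the truncated loop: forward the block to every
	// listener whose recorded sync height is below the new block's height.
	fn block_connected(&self, block: &bitcoin::Block, height: u32) {
		for (starting_height, chain_listener) in self.0.iter() {
			if height > *starting_height {
				chain_listener.block_connected(block, height);
			}
		}
	}
}
```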
4 changes: 4 additions & 0 deletions lightning-rapid-gossip-sync/Cargo.toml
@@ -9,6 +9,10 @@ description = """
 Utility to process gossip routing data from Rapid Gossip Sync Server.
 """

+[package.metadata.docs.rs]
+all-features = true
+features = ["lightning/std"]
+
 [features]
 default = ["std"]
 std = []
12 changes: 10 additions & 2 deletions lightning/src/ln/channel.rs
@@ -7153,6 +7153,12 @@ impl<SP: Deref> Channel<SP> where
 	pub fn get_channel_reestablish<L: Deref>(&mut self, logger: &L) -> msgs::ChannelReestablish where L::Target: Logger {
 		assert!(self.context.channel_state.is_peer_disconnected());
 		assert_ne!(self.context.cur_counterparty_commitment_transaction_number, INITIAL_COMMITMENT_NUMBER);
+		// This is generally the first function which gets called on any given channel once we're
+		// up and running normally. Thus, we take this opportunity to attempt to resolve the
+		// `holder_commitment_point` to get any keys which we are currently missing.
+		self.context.holder_commitment_point.try_resolve_pending(
+			&self.context.holder_signer, &self.context.secp_ctx, logger,
+		);
 		// Prior to static_remotekey, my_current_per_commitment_point was critical to claiming
 		// current to_remote balances. However, it no longer has any use, and thus is now simply
 		// set to a dummy (but valid, as required by the spec) public key.
@@ -9464,8 +9470,10 @@ impl<'a, 'b, 'c, ES: Deref, SP: Deref> ReadableArgs<(&'a ES, &'b SP, u32, &'c Ch
 				// TODO(async_signing): remove this expect with the Uninitialized variant
 				let current = holder_signer.get_per_commitment_point(cur_holder_commitment_transaction_number, &secp_ctx)
 					.expect("Must be able to derive the current commitment point upon channel restoration");
-				HolderCommitmentPoint::PendingNext {
-					transaction_number: cur_holder_commitment_transaction_number, current,
+				let next = holder_signer.get_per_commitment_point(cur_holder_commitment_transaction_number - 1, &secp_ctx)
+					.expect("Must be able to derive the next commitment point upon channel restoration");
+				HolderCommitmentPoint::Available {
+					transaction_number: cur_holder_commitment_transaction_number, current, next,
 				}
 			},
 		};
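Taken together, the two hunks above do two things: `get_channel_reestablish` now opportunistically resolves a pending commitment point, and deserialization derives both the current and next points up front so a restored channel starts in `Available` rather than `PendingNext`. A rough sketch of the state machine, using the variant names visible in this diff; the `try_resolve_pending` body and its closure-based signer parameter are illustrative assumptions, not the crate's exact signature (the real method takes the holder signer, a secp context, and a logger):

```rust
use bitcoin::secp256k1::PublicKey;

// Variant names as they appear in the diff; field types simplified.
enum HolderCommitmentPoint {
	// The (possibly asynchronous) signer has not yet produced the next point.
	PendingNext { transaction_number: u64, current: PublicKey },
	// Both the current and next per-commitment points are in hand.
	Available { transaction_number: u64, current: PublicKey, next: PublicKey },
}

impl HolderCommitmentPoint {
	// Illustrative resolution step: if the next point is still missing, ask
	// the signer for it and upgrade to `Available` on success. Commitment
	// transaction numbers count down, so "next" is `transaction_number - 1`.
	fn try_resolve_pending(&mut self, get_point: impl Fn(u64) -> Result<PublicKey, ()>) {
		if let HolderCommitmentPoint::PendingNext { transaction_number, current } = *self {
			if let Ok(next) = get_point(transaction_number - 1) {
				*self = HolderCommitmentPoint::Available { transaction_number, current, next };
			}
		}
	}
}
```

Calling this at the top of `get_channel_reestablish` is a natural spot because, as the added comment notes, it is generally the first function invoked on a channel after startup.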
4 changes: 3 additions & 1 deletion lightning/src/routing/gossip.rs
@@ -1898,7 +1898,9 @@ impl<L: Deref> NetworkGraph<L> where L::Target: Logger {
 			IndexedMapEntry::Vacant(node_entry) => {
 				let mut removed_node_counters = self.removed_node_counters.lock().unwrap();
 				**chan_info_node_counter = removed_node_counters.pop()
-					.unwrap_or(self.next_node_counter.fetch_add(1, Ordering::Relaxed) as u32);
+					.unwrap_or_else(|| {
+						self.next_node_counter.fetch_add(1, Ordering::Relaxed) as u32
+					});
 				node_entry.insert(NodeInfo {
 					channels: vec!(short_channel_id),
 					announcement_info: None,
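The swap to `unwrap_or_else` is not just style: `Option::unwrap_or` evaluates its argument eagerly, so the old code executed `fetch_add` on every insertion, consuming (and leaking) a fresh counter value even when a recycled one was popped. `unwrap_or_else` defers the side effect to the `None` case. A self-contained demonstration:

```rust
use std::sync::atomic::{AtomicU32, Ordering};

fn main() {
	let next_counter = AtomicU32::new(0);
	let recycled: Option<u32> = Some(7);

	// Eager: the argument runs before unwrap_or is called, so the counter
	// advances even though the recycled value is what gets used.
	let a = recycled.unwrap_or(next_counter.fetch_add(1, Ordering::Relaxed));
	assert_eq!(a, 7);
	assert_eq!(next_counter.load(Ordering::Relaxed), 1); // one value leaked

	// Lazy: the closure only runs when the Option is None.
	let b = recycled.unwrap_or_else(|| next_counter.fetch_add(1, Ordering::Relaxed));
	assert_eq!(b, 7);
	assert_eq!(next_counter.load(Ordering::Relaxed), 1); // unchanged
}
```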
16 changes: 14 additions & 2 deletions lightning/src/routing/scoring.rs
@@ -1818,15 +1818,27 @@ mod bucketed_history {
 				// values, which will result in us thinking we have some nontrivial probability of
 				// routing up to that amount.
 				if min_liquidity_offset_history_buckets[0] != 0 {
-					let mut highest_max_bucket_with_points = 0; // The highest max-bucket with any data
+					// Track the highest max-buckets with any data at all, as well as the highest
+					// max-bucket with at least BUCKET_FIXED_POINT_ONE.
+					let mut highest_max_bucket_with_points = 0;
+					let mut highest_max_bucket_with_full_points = None;
 					let mut total_max_points = 0; // Total points in max-buckets to consider
 					for (max_idx, max_bucket) in max_liquidity_offset_history_buckets.iter().enumerate() {
+						if *max_bucket >= BUCKET_FIXED_POINT_ONE {
+							highest_max_bucket_with_full_points = Some(cmp::max(highest_max_bucket_with_full_points.unwrap_or(0), max_idx));
+						}
 						if *max_bucket != 0 {
 							highest_max_bucket_with_points = cmp::max(highest_max_bucket_with_points, max_idx);
 						}
 						total_max_points += *max_bucket as u64;
 					}
-					let max_bucket_end_pos = BUCKET_START_POS[32 - highest_max_bucket_with_points] - 1;
+					// Use the highest max-bucket with at least BUCKET_FIXED_POINT_ONE, but if none is
+					// available use the highest max-bucket with any non-zero value. This ensures that
+					// if we have substantially decayed data we don't end up thinking the highest
+					// max-bucket is zero even though we have no points in the 0th max-bucket and do
+					// have points elsewhere.
+					let selected_max = highest_max_bucket_with_full_points.unwrap_or(highest_max_bucket_with_points);
+					let max_bucket_end_pos = BUCKET_START_POS[32 - selected_max] - 1;
 					if payment_pos < max_bucket_end_pos {
 						let (numerator, denominator) = success_probability(payment_pos as u64, 0,
 							max_bucket_end_pos as u64, POSITION_TICKS as u64 - 1, params, true);
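To see why the fallback matters: decay can leave every max-bucket holding a small non-zero value below `BUCKET_FIXED_POINT_ONE`, in which case a "full points only" rule would select nothing useful. Below is a standalone sketch of just the selection rule; the constant is a placeholder for `BUCKET_FIXED_POINT_ONE` (its real value isn't shown in this diff), and the plain assignments are equivalent to the `cmp::max` calls above because the loop visits indices in ascending order:

```rust
// Placeholder standing in for BUCKET_FIXED_POINT_ONE: "one full point" in
// the buckets' fixed-point representation (the actual value is an assumption).
const FIXED_POINT_ONE: u16 = 32;

fn select_max_bucket(max_buckets: &[u16; 32]) -> usize {
	let mut highest_with_points = 0;
	let mut highest_with_full_points = None;
	for (idx, bucket) in max_buckets.iter().enumerate() {
		if *bucket >= FIXED_POINT_ONE {
			highest_with_full_points = Some(idx);
		}
		if *bucket != 0 {
			highest_with_points = idx;
		}
	}
	// Prefer the highest bucket holding at least one full point; fall back
	// to any non-zero bucket so heavily decayed data still counts.
	highest_with_full_points.unwrap_or(highest_with_points)
}

fn main() {
	// Fully decayed history: every bucket non-zero but below one point.
	let mut buckets = [1u16; 32];
	assert_eq!(select_max_bucket(&buckets), 31);
	// One bucket with undecayed data wins over the decayed residue above it.
	buckets[5] = FIXED_POINT_ONE;
	assert_eq!(select_max_bucket(&buckets), 5);
}
```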