
Commit 4b1b845

sphinx: decouple routing info size from legacy payload size
1 parent 762c2a8 commit 4b1b845

3 files changed: +17 -17 lines changed


path.go

Lines changed: 1 addition & 1 deletion
@@ -249,7 +249,7 @@ func (hp *HopPayload) Decode(r io.Reader) error {
 	case 0x00:
 		// Our size is just the payload, without the HMAC. This means
 		// that this is the legacy payload type.
-		payloadSize = HopDataSize - HMACSize
+		payloadSize = LegacyHopDataSize - HMACSize
 		hp.Type = PayloadLegacy
 
 	default:
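
For context, a minimal standalone sketch of what the legacy branch now computes. The concrete values (65-byte hop_data, 32-byte HMAC) are taken from the constant documentation in the sphinx.go diff below, so treat them as illustrative assumptions rather than an excerpt of the package:

package main

import "fmt"

// Illustrative values from the sphinx.go comments in this commit:
// 1 realm + 8 channel_id + 8 amount + 4 CLTV + 12 padding + 32 HMAC = 65.
const (
	legacyHopDataSize = 65
	hmacSize          = 32
)

func main() {
	// A 0x00 realm byte selects the legacy payload type, whose payload is
	// the fixed hop_data size with the trailing HMAC stripped off.
	payloadSize := legacyHopDataSize - hmacSize
	fmt.Println(payloadSize) // prints 33
}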

sphinx.go

Lines changed: 13 additions & 13 deletions
@@ -36,26 +36,26 @@ const (
 	// utilize this space to pack in the unrolled bytes.
 	NumPaddingBytes = 12
 
-	// HopDataSize is the fixed size of hop_data. BOLT 04 currently
+	// LegacyHopDataSize is the fixed size of hop_data. BOLT 04 currently
 	// specifies this to be 1 byte realm, 8 byte channel_id, 8 byte amount
-	// to forward, 4 byte outgoing CLTV value, 12 bytes padding and 32
-	// bytes HMAC for a total of 65 bytes per hop.
-	HopDataSize = (RealmByteSize + AddressSize + AmtForwardSize +
+	// to forward, 4 byte outgoing CLTV value, 12 bytes padding and 32 bytes
+	// HMAC for a total of 65 bytes per hop.
+	LegacyHopDataSize = (RealmByteSize + AddressSize + AmtForwardSize +
 		OutgoingCLTVSize + NumPaddingBytes + HMACSize)
 
-	// MaxPayloadSize is the maximum size a payload for a single hop can
-	// be. This is the worst case scenario of a single hop, consuming all
-	// 20 frames. We need to know this in order to generate a sufficiently
-	// long stream of pseudo-random bytes when encrypting/decrypting the
-	// payload.
-	MaxPayloadSize = NumMaxHops * HopDataSize
+	// MaxPayloadSize is the maximum size a payload for a single hop can be.
+	// This is the worst case scenario of a single hop, consuming all
+	// available space. We need to know this in order to generate a
+	// sufficiently long stream of pseudo-random bytes when
+	// encrypting/decrypting the payload.
+	MaxPayloadSize = routingInfoSize
 
 	// routingInfoSize is the fixed size of the the routing info. This
 	// consists of a addressSize byte address and a HMACSize byte HMAC for
 	// each hop of the route, the first pair in cleartext and the following
-	// pairs increasingly obfuscated. In case fewer than numMaxHops are
-	// used, then the remainder is padded with null-bytes, also obfuscated.
-	routingInfoSize = NumMaxHops * HopDataSize
+	// pairs increasingly obfuscated. If not all space is used up, the
+	// remainder is padded with null-bytes, also obfuscated.
+	routingInfoSize = 1300
 
 	// numStreamBytes is the number of bytes produced by our CSPRG for the
 	// key stream implementing our stream cipher to encrypt/decrypt the mix
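
The rename only matters because the packet-level sizes no longer derive from the legacy per-hop layout. A quick sanity check, using the 20 frames mentioned in the removed comment and the documented 65-byte legacy hop_data (both values restated here as assumptions), shows the new fixed 1300-byte routingInfoSize matches what the old coupled definition produced:

package main

import "fmt"

const (
	numMaxHops        = 20   // from the removed "20 frames" comment
	legacyHopDataSize = 65   // documented legacy hop_data size
	routingInfoSize   = 1300 // new fixed constant in this commit
)

func main() {
	// Old definition: routingInfoSize = NumMaxHops * HopDataSize.
	// Hard-coding 1300 keeps the packet size identical while removing the
	// dependency on the legacy payload layout.
	fmt.Println(numMaxHops*legacyHopDataSize == routingInfoSize) // prints true
}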

sphinx_test.go

Lines changed: 3 additions & 3 deletions
@@ -598,7 +598,7 @@ func TestSphinxHopVariableSizedPayloads(t *testing.T) {
 			eobMapping: map[int]HopPayload{
 				0: HopPayload{
 					Type: PayloadTLV,
-					Payload: bytes.Repeat([]byte("a"), HopDataSize-HMACSize),
+					Payload: bytes.Repeat([]byte("a"), LegacyHopDataSize-HMACSize),
 				},
 			},
 		},
@@ -610,7 +610,7 @@ func TestSphinxHopVariableSizedPayloads(t *testing.T) {
 			eobMapping: map[int]HopPayload{
 				0: HopPayload{
 					Type: PayloadTLV,
-					Payload: bytes.Repeat([]byte("a"), HopDataSize*3),
+					Payload: bytes.Repeat([]byte("a"), LegacyHopDataSize*3),
 				},
 			},
 		},
@@ -631,7 +631,7 @@ func TestSphinxHopVariableSizedPayloads(t *testing.T) {
 			}, nil),
 			1: HopPayload{
 				Type: PayloadTLV,
-				Payload: bytes.Repeat([]byte("a"), HopDataSize*2),
+				Payload: bytes.Repeat([]byte("a"), LegacyHopDataSize*2),
 			},
 		},
 	},
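
In the updated tests the legacy constant is only used as a convenient yardstick for sizing TLV payloads. A sketch of that pattern, where the 65-byte value is again assumed from the sphinx.go comments above:

package main

import (
	"bytes"
	"fmt"
)

const legacyHopDataSize = 65 // assumed from the sphinx.go comments above

func main() {
	// A TLV payload spanning three legacy-sized frames, as exercised by the
	// multi-frame test case.
	payload := bytes.Repeat([]byte("a"), legacyHopDataSize*3)
	fmt.Println(len(payload)) // prints 195
}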
