Commit c2ce8dc

Small HTTP session fixes (#4601)
1 parent 07854ab commit c2ce8dc

File tree

4 files changed (+35, -31 lines)

scapy/layers/http.py

Lines changed: 8 additions & 13 deletions

@@ -652,16 +652,7 @@ def tcp_reassemble(cls, data, metadata, _):
             is_response = isinstance(http_packet.payload, cls.clsresp)
             # Packets may have a Content-Length we must honnor
             length = http_packet.Content_Length
-            # Heuristic to try and detect instant HEAD responses, as those include a
-            # Content-Length that must not be honored. This is a bit crappy, and assumes
-            # that a 'HEAD' will never include an Encoding...
-            if (
-                is_response and
-                data.endswith(b"\r\n\r\n") and
-                not http_packet[HTTPResponse]._get_encodings()
-            ):
-                detect_end = lambda _: True
-            elif length is not None:
+            if length is not None:
                 # The packet provides a Content-Length attribute: let's
                 # use it. When the total size of the frags is high enough,
                 # we have the packet
@@ -672,8 +663,12 @@ def tcp_reassemble(cls, data, metadata, _):
                     detect_end = lambda dat: len(dat) - http_length >= length
                 else:
                     # The HTTP layer isn't fully received.
-                    detect_end = lambda dat: False
-                    metadata["detect_unknown"] = True
+                    if metadata.get("tcp_end", False):
+                        # This was likely a HEAD response. Ugh
+                        detect_end = lambda dat: True
+                    else:
+                        detect_end = lambda dat: False
+                        metadata["detect_unknown"] = True
             else:
                 # It's not Content-Length based. It could be chunked
                 encodings = http_packet[cls].payload._get_encodings()
@@ -833,7 +828,7 @@ def request(self, url, data=b"", timeout=5, follow_redirects=True, **headers):
         Perform a HTTP(s) request.
         """
         # Parse request url
-        m = re.match(r"(https?)://([^/:]+)(?:\:(\d+))?(?:/(.*))?", url)
+        m = re.match(r"(https?)://([^/:]+)(?:\:(\d+))?(/.*)?", url)
         if not m:
             raise ValueError("Bad URL !")
         transport, host, port, path = m.groups()

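The regex change in `request()` means the captured path group now keeps its leading slash (and stays `None` when the URL has no path) instead of being stripped of it. A minimal sketch of the difference, using only the standard `re` module and an illustrative URL:

```python
import re

OLD = r"(https?)://([^/:]+)(?:\:(\d+))?(?:/(.*))?"
NEW = r"(https?)://([^/:]+)(?:\:(\d+))?(/.*)?"

url = "http://example.com:8080/index.html"
print(re.match(OLD, url).groups())  # ('http', 'example.com', '8080', 'index.html')
print(re.match(NEW, url).groups())  # ('http', 'example.com', '8080', '/index.html')
```
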
scapy/layers/l2.py

Lines changed: 5 additions & 1 deletion

@@ -158,7 +158,11 @@ def getmacbyip(ip, chainCC=0):
     # Check the routing table
     iff, _, gw = conf.route.route(ip)

-    # Broadcast case
+    # Limited broadcast
+    if ip == "255.255.255.255":
+        return "ff:ff:ff:ff:ff:ff"
+
+    # Directed broadcast
     if (iff == conf.loopback_name) or (ip in conf.route.get_if_bcast(iff)):
         return "ff:ff:ff:ff:ff:ff"

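With the new check, the limited broadcast address resolves to the Ethernet broadcast MAC before the per-interface directed-broadcast lookup. A small usage sketch, assuming a scapy build that includes this commit:

```python
from scapy.layers.l2 import getmacbyip

# Limited broadcast now short-circuits to the Ethernet broadcast MAC,
# ahead of the existing conf.route.get_if_bcast() directed-broadcast check.
print(getmacbyip("255.255.255.255"))  # 'ff:ff:ff:ff:ff:ff'
```

Directed broadcasts (addresses listed by conf.route.get_if_bcast() for the routed interface) keep taking the existing path and also resolve to ff:ff:ff:ff:ff:ff.
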
scapy/sessions.py

Lines changed: 16 additions & 11 deletions

@@ -12,7 +12,7 @@

 from scapy.compat import orb
 from scapy.config import conf
-from scapy.packet import NoPayload, Packet
+from scapy.packet import Packet
 from scapy.pton_ntop import inet_pton

 # Typing imports
@@ -310,8 +310,6 @@ def process(self,
         if TCP not in pkt:
             return pkt
         pay = pkt[TCP].payload
-        if isinstance(pay, (NoPayload, conf.padding_layer)):
-            return pkt
         new_data = pay.original
         # Match packets by a unique TCP identifier
         ident = self._get_ident(pkt)
@@ -333,16 +331,22 @@ def process(self,
             metadata["tcp_reassemble"] = tcp_reassemble = streamcls(pay_class)
         else:
             tcp_reassemble = metadata["tcp_reassemble"]
-        # Get a relative sequence number for a storage purpose
-        relative_seq = metadata.get("relative_seq", None)
-        if relative_seq is None:
-            relative_seq = metadata["relative_seq"] = seq - 1
-        seq = seq - relative_seq
-        # Add the data to the buffer
-        data.append(new_data, seq)
+
+        if pay:
+            # Get a relative sequence number for a storage purpose
+            relative_seq = metadata.get("relative_seq", None)
+            if relative_seq is None:
+                relative_seq = metadata["relative_seq"] = seq - 1
+            seq = seq - relative_seq
+            # Add the data to the buffer
+            data.append(new_data, seq)
+
         # Check TCP FIN or TCP RESET
         if pkt[TCP].flags.F or pkt[TCP].flags.R:
             metadata["tcp_end"] = True
+        elif not pay:
+            # If there's no payload and the stream isn't ending, ignore.
+            return pkt

         # In case any app layer protocol requires it,
         # allow the parser to inspect TCP PSH flag
@@ -393,7 +397,8 @@ def process(self,
             if isinstance(packet, conf.padding_layer):
                 return None
             # Rebuild resulting packet
-            pay.underlayer.remove_payload()
+            if pay:
+                pay.underlayer.remove_payload()
             if IP in pkt:
                 pkt[IP].len = None
                 pkt[IP].chksum = None

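On the session side, the net effect is that payload-less segments only enter reassembly when they matter: pure ACKs are passed through unchanged, while an empty FIN/RST still sets metadata["tcp_end"] so a pending buffer (such as a HEAD response advertising a Content-Length with no body) can be flushed. A minimal sniffing sketch, with a placeholder capture file name:

```python
from scapy.sendrecv import sniff
from scapy.sessions import TCPSession

# "capture.pcap" is a placeholder; any capture with an HTTP exchange works.
pkts = sniff(offline="capture.pcap", session=TCPSession)
for p in pkts:
    print(p.summary())
```
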
test/scapy/layers/http.uts

Lines changed: 6 additions & 6 deletions

@@ -79,11 +79,11 @@ assert HTTPRequest in a[3]
 assert a[3].Method == b"HEAD"
 assert a[3].User_Agent == b'curl/7.88.1'

-assert HTTPResponse in a[5]
-assert a[5].Content_Type == b'text/html; charset=UTF-8'
-assert a[5].Expires == b'Mon, 01 Apr 2024 22:25:38 GMT'
-assert a[5].Reason_Phrase == b'Moved Permanently'
-assert a[5].X_Frame_Options == b"SAMEORIGIN"
+assert HTTPResponse in a[6]
+assert a[6].Content_Type == b'text/html; charset=UTF-8'
+assert a[6].Expires == b'Mon, 01 Apr 2024 22:25:38 GMT'
+assert a[6].Reason_Phrase == b'Moved Permanently'
+assert a[6].X_Frame_Options == b"SAMEORIGIN"

 = HTTP build with 'chunked' content type

@@ -214,7 +214,7 @@ filename = scapy_path("/test/pcaps/http_tcp_psh.pcap.gz")

 pkts = sniff(offline=filename, session=TCPSession)

-assert len(pkts) == 15
+assert len(pkts) == 14
 # Verify a split header exists in the packet
 assert pkts[5].User_Agent == b'example_user_agent'
