Skip to content
This repository was archived by the owner on Apr 26, 2024. It is now read-only.

Commit f20ba02

Browse files
committed
Clean up PR
1 parent ab8011b commit f20ba02

File tree

5 files changed

+24
-88
lines changed

5 files changed

+24
-88
lines changed

scripts-dev/complement.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -65,4 +65,4 @@ if [[ -n "$1" ]]; then
6565
fi
6666

6767
# Run the tests!
68-
go test -v -tags synapse_blacklist,msc2946,msc3083,msc2716,msc2403 -count=1 $EXTRA_COMPLEMENT_ARGS ./tests -run TestBackfillingHistory/parallel/Historical_messages_are_visible_when_already_joined_on_federated_server
68+
go test -v -tags synapse_blacklist,msc2946,msc3083,msc2716,msc2403 -count=1 $EXTRA_COMPLEMENT_ARGS ./tests

synapse/events/utils.py

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -253,13 +253,13 @@ def format_event_for_client_v1(d):
253253

254254
def format_event_for_client_v2(d):
255255
drop_keys = (
256-
# "auth_events",
257-
# "prev_events",
258-
# "hashes",
259-
# "signatures",
260-
# "depth",
261-
# "origin",
262-
# "prev_state",
256+
"auth_events",
257+
"prev_events",
258+
"hashes",
259+
"signatures",
260+
"depth",
261+
"origin",
262+
"prev_state",
263263
)
264264
for key in drop_keys:
265265
d.pop(key, None)

synapse/handlers/federation.py

Lines changed: 0 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -1054,14 +1054,10 @@ async def maybe_backfill(
10541054
with (await self._room_backfill.queue(room_id)):
10551055
return await self._maybe_backfill_inner(room_id, current_depth, limit)
10561056

1057-
# Todo
10581057
async def _maybe_backfill_inner(
10591058
self, room_id: str, current_depth: int, limit: int
10601059
) -> bool:
10611060
extremities = await self.store.get_oldest_events_with_depth_in_room(room_id)
1062-
logger.info(
1063-
"_maybe_backfill_inner extremities(%d)=%s", len(extremities), extremities
1064-
)
10651061

10661062
if not extremities:
10671063
logger.debug("Not backfilling as no extremeties found.")
@@ -2127,18 +2123,8 @@ async def on_backfill_request(
21272123
limit = min(limit, 100)
21282124

21292125
events = await self.store.get_backfill_events(room_id, pdu_list, limit)
2130-
logger.info(
2131-
"on_backfill_request get_backfill_events events(%d)=%s",
2132-
len(events),
2133-
[f'{ev.content.get("body")}: {ev.type} ({ev.event_id})' for ev in events],
2134-
)
21352126

21362127
events = await filter_events_for_server(self.storage, origin, events)
2137-
logger.info(
2138-
"on_backfill_request filter_events_for_server events(%d)=%s",
2139-
len(events),
2140-
events,
2141-
)
21422128

21432129
return events
21442130

synapse/storage/databases/main/event_federation.py

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1009,8 +1009,8 @@ def _get_backfill_events(self, txn, room_id, event_list, limit):
10091009
connected_insertion_event_query, (event_id, limit - len(event_results))
10101010
)
10111011
connected_insertion_event_id_results = list(txn)
1012-
logger.info(
1013-
"connected_insertion_event_query %s",
1012+
logger.debug(
1013+
"_get_backfill_events: connected_insertion_event_query %s",
10141014
connected_insertion_event_id_results,
10151015
)
10161016
for row in connected_insertion_event_id_results:
@@ -1022,8 +1022,8 @@ def _get_backfill_events(self, txn, room_id, event_list, limit):
10221022
chunk_connection_query, (row[1], limit - len(event_results))
10231023
)
10241024
chunk_start_event_id_results = list(txn)
1025-
logger.info(
1026-
"chunk_start_event_id_results %s",
1025+
logger.debug(
1026+
"_get_backfill_events: chunk_start_event_id_results %s",
10271027
chunk_start_event_id_results,
10281028
)
10291029
for row in chunk_start_event_id_results:
@@ -1032,7 +1032,9 @@ def _get_backfill_events(self, txn, room_id, event_list, limit):
10321032

10331033
txn.execute(query, (event_id, False, limit - len(event_results)))
10341034
prev_event_id_results = list(txn)
1035-
logger.info("prev_event_ids %s", prev_event_id_results)
1035+
logger.debug(
1036+
"_get_backfill_events: prev_event_ids %s", prev_event_id_results
1037+
)
10361038

10371039
for row in prev_event_id_results:
10381040
if row[1] not in event_results:

synapse/storage/databases/main/events.py

Lines changed: 9 additions & 61 deletions
Original file line numberDiff line numberDiff line change
@@ -1505,7 +1505,6 @@ def _update_metadata_tables_txn(
15051505
self._handle_event_relations(txn, event)
15061506

15071507
self._handle_insertion_event(txn, event)
1508-
self._handle_marker_event(txn, event)
15091508
self._handle_chunk_id(txn, event)
15101509

15111510
# Store the labels for this event.
@@ -1760,19 +1759,19 @@ def _handle_event_relations(self, txn, event):
17601759
if rel_type == RelationTypes.REPLACE:
17611760
txn.call_after(self.store.get_applicable_edit.invalidate, (parent_id,))
17621761

1763-
def _handle_insertion_event(self, txn, event):
1762+
def _handle_insertion_event(self, txn: LoggingTransaction, event: EventBase):
17641763
"""Handles inserting insertion extremities during persistence of marker events
17651764
17661765
Args:
1767-
txn
1768-
event (EventBase)
1766+
txn: The database transaction object
1767+
event: The event to process
17691768
"""
17701769

17711770
if event.type != EventTypes.MSC2716_INSERTION:
17721771
# Not an insertion event
17731772
return
17741773

1775-
logger.info("_handle_insertion_event %s", event)
1774+
logger.debug("_handle_insertion_event %s", event)
17761775

17771776
next_chunk_id = event.content.get(EventContentFields.MSC2716_NEXT_CHUNK_ID)
17781777
if next_chunk_id is None:
@@ -1802,72 +1801,21 @@ def _handle_insertion_event(self, txn, event):
18021801
},
18031802
)
18041803

1805-
def _handle_marker_event(self, txn, event):
1806-
"""Handles inserting insertion extremeties during peristence of marker events
1807-
1808-
Args:
1809-
txn
1810-
event (EventBase)
1811-
"""
1812-
1813-
if event.type != EventTypes.MSC2716_MARKER:
1814-
# Not a marker event
1815-
return
1816-
1817-
logger.info("_handle_marker_event %s", event)
1818-
1819-
# TODO: We should attempt to backfill the insertion event instead
1820-
# of trying to pack all of the info in the marker event. Otherwise,
1821-
# we need to pack in the insertion_prev_events and insertion_next_chunk_id.
1822-
# GET /_matrix/federation/v1/event/{eventId}
1823-
1824-
insertion_event_id = event.content.get(
1825-
EventContentFields.MSC2716_MARKER_INSERTION
1826-
)
1827-
1828-
# We will trust that the application service sending the marker event is
1829-
# also the one that knows about the insertion event
1830-
# insertion_event_origin = get_domain_from_id(event.sender)
1831-
# m_ev = await self.federation_client.get_event(
1832-
# [insertion_event_origin],
1833-
# insertion_event_id,
1834-
# outlier=True,
1835-
# timeout=10000,
1836-
# )
1837-
# _auth_and_persist_events
1838-
# handle_new_client_event
1839-
1840-
# insertion_prev_event_ids = event.content.get(
1841-
# EventContentFields.MSC2716_MARKER_INSERTION_PREV_EVENTS
1842-
# )
1843-
# if not insertion_event_id or not insertion_prev_event_ids:
1844-
# # Invalid marker event
1845-
# return
1846-
1847-
# for prev_event_id in insertion_prev_event_ids:
1848-
# self.db_pool.simple_insert_txn(
1849-
# txn,
1850-
# table="insertion_event_edges",
1851-
# values={
1852-
# "insertion_event_id": insertion_event_id,
1853-
# "room_id": event.room_id,
1854-
# "insertion_prev_event_id": prev_event_id,
1855-
# },
1856-
# )
1857-
1858-
def _handle_chunk_id(self, txn, event):
1804+
def _handle_chunk_id(self, txn: LoggingTransaction, event: EventBase):
18591805
"""Handles inserting the chunk connections between the event at the
18601806
start of a chunk and an insertion event
18611807
1862-
Args: txn event (EventBase)
1808+
Args:
1809+
txn: The database transaction object
1810+
event: The event to process
18631811
"""
18641812

18651813
chunk_id = event.content.get(EventContentFields.MSC2716_CHUNK_ID)
18661814
if chunk_id is None:
18671815
# No chunk connection to persist
18681816
return
18691817

1870-
logger.info("_handle_chunk_id %s %s", chunk_id, event)
1818+
logger.debug("_handle_chunk_id %s %s", chunk_id, event)
18711819

18721820
# Keep track of the insertion event and the chunk ID
18731821
self.db_pool.simple_insert_txn(

0 commit comments

Comments
 (0)