
Commit 4191f56

Remove fake prev events from historical state chain
Fix #11091. We have to allow creating events that have no prev_events but do have auth_events, and since the historical member events are outliers with no prev_events to resolve them against, we want to avoid adding them as backward extremities.
1 parent: 477c15d

File tree (8 files changed: +83 −9)

synapse/federation/federation_server.py
synapse/handlers/federation.py
synapse/handlers/federation_event.py
synapse/handlers/message.py
synapse/handlers/room_batch.py
synapse/handlers/room_member.py
synapse/storage/databases/main/event_federation.py
synapse/storage/databases/main/events.py

synapse/federation/federation_server.py
Lines changed: 2 additions & 0 deletions

@@ -200,6 +200,8 @@ async def on_backfill_request(
 
         res = self._transaction_dict_from_pdus(pdus)
 
+        logger.info("on_backfill_request res=%s", res)
+
         return 200, res
 
     async def on_incoming_transaction(

synapse/handlers/federation.py
Lines changed: 1 addition & 0 deletions

@@ -1061,6 +1061,7 @@ async def on_backfill_request(
         events = await self.store.get_backfill_events(room_id, pdu_list, limit)
 
         events = await filter_events_for_server(self.storage, origin, events)
+        logger.info("on_backfill_request resultant events(%d)=%s", len(events), events)
 
         return events
 

synapse/handlers/federation_event.py
Lines changed: 2 additions & 1 deletion

@@ -417,7 +417,8 @@ async def backfill(
             dest, room_id, limit=limit, extremities=extremities
         )
         logger.info(
-            "from remote server: got backfill response events=%s",
+            "from remote server: got backfill response events(%d)=%s",
+            len(events),
             [
                 {
                     "event_id": ev.event_id,

synapse/handlers/message.py
Lines changed: 4 additions & 2 deletions

@@ -955,8 +955,10 @@ async def create_new_client_event(
         # event and then try to auth it (which fails with a somewhat confusing "No
         # create event in auth events")
         assert (
-            builder.type == EventTypes.Create or len(prev_event_ids) > 0
-        ), "Attempting to create an event with no prev_events"
+            builder.type == EventTypes.Create
+            or len(prev_event_ids) > 0
+            or len(auth_event_ids) > 0
+        ), "Attempting to create an event with no prev_events or auth_event_ids"
 
         event = await builder.build(
             prev_event_ids=prev_event_ids,
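The relaxed assertion is the heart of the fix: a non-create event may now be created with an empty prev_events list as long as it carries explicit auth_events. As a rough sketch of the same guard outside of Synapse (the function name, argument list, and example event IDs below are illustrative stand-ins, not the real create_new_client_event API):

def check_event_may_be_created(
    event_type: str,
    prev_event_ids: list,
    auth_event_ids: list,
) -> None:
    # Mirrors the assertion above: a create event starts the DAG; anything else
    # must point at prev_events or, for floating historical events, auth_events.
    assert (
        event_type == "m.room.create"  # EventTypes.Create
        or len(prev_event_ids) > 0
        or len(auth_event_ids) > 0
    ), "Attempting to create an event with no prev_events or auth_event_ids"


# A floating historical member event now passes because it supplies auth_events:
check_event_may_be_created("m.room.member", [], ["$create", "$power_levels"])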

synapse/handlers/room_batch.py
Lines changed: 10 additions & 1 deletion

@@ -184,7 +184,7 @@ async def persist_state_events_at_start(
 
         # Make the state events float off on their own so we don't have a
         # bunch of `@mxid joined the room` noise between each batch
-        prev_event_ids_for_state_chain = [generate_fake_event_id()]
+        prev_event_ids_for_state_chain = []  # generate_fake_event_id()
 
         for state_event in state_events_at_start:
             assert_params_in_dict(

@@ -227,6 +227,15 @@ async def persist_state_events_at_start(
                     # reference and also update in the event when we append later.
                     auth_event_ids=auth_event_ids.copy(),
                 )
+
+                mem_event = await self.store.get_event(event_id)
+                logger.info(
+                    "room_batch mem_event_id=%s depth=%s stream_ordering=%s prev_event_ids=%s",
+                    mem_event.event_id,
+                    mem_event.depth,
+                    mem_event.internal_metadata.stream_ordering,
+                    mem_event.prev_event_ids(),
+                )
             else:
                 # TODO: Add some complement tests that adds state that is not member joins
                 # and will use this code path. Maybe we only want to support join state events
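With the fake prev_event gone, the historical state chain floats: each member event is created with an empty prev_events list and is anchored only through the copied auth_event_ids, which grow as the chain is built. A simplified illustration of that shape, using plain dicts and made-up event IDs rather than Synapse's real event objects:

auth_event_ids = ["$create", "$power_levels"]  # placeholder auth chain
prev_event_ids_for_state_chain = []            # was [generate_fake_event_id()]

state_chain = []
for index, user_id in enumerate(["@alice:example.org", "@bob:example.org"]):
    member_event = {
        "type": "m.room.member",
        "state_key": user_id,
        "prev_events": list(prev_event_ids_for_state_chain),  # empty: the event floats
        "auth_events": auth_event_ids.copy(),  # copied, as in the hunk above
    }
    state_chain.append(member_event)
    # Each later event in the chain is authed by the one created before it.
    auth_event_ids.append("$historical_member_%d" % index)  # made-up event_id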

synapse/handlers/room_member.py
Lines changed: 1 addition & 1 deletion

@@ -644,7 +644,7 @@ async def update_membership_locked(
         if block_invite:
             raise SynapseError(403, "Invites have been disabled on this server")
 
-        if prev_event_ids:
+        if prev_event_ids is not None:
             return await self._local_membership_update(
                 requester=requester,
                 target=target,
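The membership handler needs the matching tweak because `if prev_event_ids:` treats an explicitly empty list the same as `None`, so a deliberately floating historical membership (empty prev_events, auth_events only) would skip the `_local_membership_update` path. A two-line reminder of the distinction the new check relies on:

prev_event_ids = []  # caller supplied prev_events, but deliberately left them empty

print(bool(prev_event_ids))        # False: the old `if prev_event_ids:` check skips the branch
print(prev_event_ids is not None)  # True: the new check still takes the branch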

synapse/storage/databases/main/event_federation.py
Lines changed: 1 addition & 1 deletion

@@ -1127,7 +1127,7 @@ def _get_backfill_events(self, txn, room_id, event_list, limit):
                     ev["type"],
                     ev["depth"],
                     event_lookup_result["stream_ordering"],
-                    ev["content"].get("body", None),
+                    ev["content"].get("body", ev["content"]),
                 )
             else:
                 logger.info(

synapse/storage/databases/main/events.py
Lines changed: 62 additions & 3 deletions

@@ -2139,6 +2139,38 @@ def _update_backward_extremeties(self, txn, events):
 
         Forward extremities are handled when we first start persisting the events.
         """
+        logger.info(
+            "_update_backward_extremeties events=%s",
+            [
+                {
+                    "event_id": ev.event_id,
+                    "prev_events": ev.prev_event_ids(),
+                    "outlier": ev.internal_metadata.is_outlier(),
+                }
+                for ev in events
+            ],
+        )
+
+        for ev in events:
+            for e_id in ev.prev_event_ids():
+                query = """
+                    SELECT 1 FROM event_edges
+                    INNER JOIN events AS e USING (event_id, room_id)
+                    WHERE event_id = ? AND room_id = ? AND e.outlier = TRUE
+                """
+
+                txn.execute(
+                    query,
+                    (e_id, ev.room_id),
+                )
+                result = txn.fetchall()
+                logger.info(
+                    "_update_backward_extremeties test ev=%s prev_event_id=%s result=%s",
+                    ev.event_id,
+                    e_id,
+                    result,
+                )
+
         # From the events passed in, add all of the prev events as backwards extremities.
         # Ignore any events that are already backwards extrems or outliers.
         query = (

@@ -2147,22 +2179,45 @@ def _update_backward_extremeties(self, txn, events):
             " SELECT 1 FROM event_backward_extremities"
             " WHERE event_id = ? AND room_id = ?"
             " )"
+            # 1. Don't add an event as a extremity again if we already persisted it
+            # as a non-outlier.
+            # 2. Don't add an outlier as an extremity if it has no prev_events
             " AND NOT EXISTS ("
-            " SELECT 1 FROM events WHERE event_id = ? AND room_id = ? "
-            " AND outlier = ?"
+            " SELECT 1 FROM events"
+            " LEFT JOIN event_edges edge"
+            " ON edge.event_id = events.event_id"
+            " WHERE events.event_id = ? AND events.room_id = ? AND (events.outlier = FALSE OR edge.event_id IS NULL)"
             " )"
         )
 
         txn.execute_batch(
             query,
             [
-                (e_id, ev.room_id, e_id, ev.room_id, e_id, ev.room_id, False)
+                (e_id, ev.room_id, e_id, ev.room_id, e_id, ev.room_id)
                 for ev in events
                 for e_id in ev.prev_event_ids()
                 if not ev.internal_metadata.is_outlier()
             ],
         )
 
+        for ev in events:
+            for e_id in ev.prev_event_ids():
+                query = """
+                    SELECT * FROM event_backward_extremities
+                    WHERE event_id = ? AND room_id = ?
+                """
+
+                txn.execute(
+                    query,
+                    (e_id, ev.room_id),
+                )
+                result = txn.fetchall()
+                logger.info(
+                    "_update_backward_extremeties ended up as prev_event_id=%s result=%s",
+                    e_id,
+                    result,
+                )
+
         # Delete all these events that we've already fetched and now know that their
         # prev events are the new backwards extremeties.
         query = (

@@ -2175,6 +2230,10 @@ def _update_backward_extremeties(self, txn, events):
                 (ev.event_id, ev.room_id)
                 for ev in events
                 if not ev.internal_metadata.is_outlier()
+                # If we encountered an event with no prev_events, then we might
+                # as well remove it now because it won't ever have anything else
+                # to backfill from.
+                or len(ev.prev_event_ids()) == 0
             ],
         )
 
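The reworked NOT EXISTS clause encodes two rules for each prev_event of a persisted event: skip it if we already have it as a non-outlier, and skip it if we only know it as an event with no edges of its own, i.e. an outlier with no prev_events and therefore nothing behind it to backfill. A rough Python restatement of that predicate (the boolean parameters stand in for the SQL lookups and are not real Synapse storage APIs):

def should_insert_backward_extremity(
    already_a_backward_extremity: bool,
    persisted_as_non_outlier: bool,
    persisted_with_no_prev_events: bool,
) -> bool:
    if already_a_backward_extremity:
        return False  # first NOT EXISTS: already recorded as a backward extremity
    if persisted_as_non_outlier:
        return False  # rule 1: already persisted as a non-outlier
    if persisted_with_no_prev_events:
        return False  # rule 2: an outlier with no prev_events, nothing to backfill
    return True

The change to the delete query at the end of the hunk follows the same reasoning from the other side: an event whose own prev_events list is empty can never yield anything further to backfill, so it is removed from the backward extremities as well.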
