Commit d2c582e

Move unique snowflake homeserver background tasks to start_background_tasks (#19037)
(the standard pattern for this kind of thing)
1 parent: 2d07bd7

14 files changed: +333, -184 lines


changelog.d/19037.misc

Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
+Move unique snowflake homeserver background tasks to `start_background_tasks` (the standard pattern for this kind of thing).
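For context, the "standard pattern" referenced here is the `REQUIRED_ON_BACKGROUND_TASK_STARTUP` list on `HomeServer`: each entry `foo` causes `start_background_tasks` to call `self.get_foo_handler()`, and constructing the handler is what kicks off its background work (see the `synapse/server.py` hunk further down). A minimal sketch of that dispatch convention, with made-up handler names, might look like this:

```python
# Minimal sketch of the dispatch convention only; the real list, handlers and
# task scheduler live on synapse.server.HomeServer.
class HomeServerSketch:
    # Hypothetical entries for illustration.
    REQUIRED_ON_BACKGROUND_TASK_STARTUP = ["account_validity", "stats"]

    def get_account_validity_handler(self) -> object:
        # In Synapse, building a handler like this starts its background loops.
        return object()

    def get_stats_handler(self) -> object:
        return object()

    def start_background_tasks(self) -> None:
        # Resolve each name "foo" to self.get_foo_handler() and call it.
        for name in self.REQUIRED_ON_BACKGROUND_TASK_STARTUP:
            getattr(self, "get_" + name + "_handler")()
```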

synapse/app/_base.py

Lines changed: 0 additions & 10 deletions
@@ -64,7 +64,6 @@
 import synapse.util.caches
 from synapse.api.constants import MAX_PDU_SIZE
 from synapse.app import check_bind_error
-from synapse.app.phone_stats_home import start_phone_stats_home
 from synapse.config import ConfigError
 from synapse.config._base import format_config_error
 from synapse.config.homeserver import HomeServerConfig
@@ -683,15 +682,6 @@ def log_shutdown() -> None:
     if hs.config.worker.run_background_tasks:
         hs.start_background_tasks()
 
-        # TODO: This should be moved to same pattern we use for other background tasks:
-        # Add to `REQUIRED_ON_BACKGROUND_TASK_STARTUP` and rely on
-        # `start_background_tasks` to start it.
-        await hs.get_common_usage_metrics_manager().setup()
-
-        # TODO: This feels like another pattern that should refactored as one of the
-        # `REQUIRED_ON_BACKGROUND_TASK_STARTUP`
-        start_phone_stats_home(hs)
-
     if freeze:
         # We now freeze all allocated objects in the hopes that (almost)
         # everything currently allocated are things that will be used for the

synapse/metrics/common_usage_metrics.py

Lines changed: 1 addition & 1 deletion
@@ -62,7 +62,7 @@ async def get_metrics(self) -> CommonUsageMetrics:
         """
         return await self._collect()
 
-    async def setup(self) -> None:
+    def setup(self) -> None:
        """Keep the gauges for common usage metrics up to date."""
         self._hs.run_as_background_process(
             desc="common_usage_metrics_update_gauges",
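The `async` qualifier can be dropped because `setup()` no longer awaits anything: it only hands a looping coroutine to the background-process machinery and returns. A rough, self-contained illustration of that shape (using plain `asyncio` rather than Synapse's `run_as_background_process`):

```python
# Illustration only, not Synapse code: a synchronous setup() that just
# schedules a periodic gauge-update loop and returns immediately.
import asyncio


class UsageMetricsSketch:
    def __init__(self) -> None:
        self._gauge = 0.0

    def setup(self) -> None:
        # Must be called while an event loop is running (as it is at homeserver
        # startup); there is nothing to await, so the method can be synchronous.
        asyncio.get_running_loop().create_task(self._update_gauges_loop())

    async def _update_gauges_loop(self) -> None:
        while True:
            self._gauge += 1  # stand-in for collecting real usage statistics
            await asyncio.sleep(5 * 60)
```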

synapse/server.py

Lines changed: 3 additions & 0 deletions
@@ -62,6 +62,7 @@
 from synapse.api.filtering import Filtering
 from synapse.api.ratelimiting import Ratelimiter, RequestRatelimiter
 from synapse.app._base import unregister_sighups
+from synapse.app.phone_stats_home import start_phone_stats_home
 from synapse.appservice.api import ApplicationServiceApi
 from synapse.appservice.scheduler import ApplicationServiceScheduler
 from synapse.config.homeserver import HomeServerConfig
@@ -643,6 +644,8 @@ def start_background_tasks(self) -> None:
         for i in self.REQUIRED_ON_BACKGROUND_TASK_STARTUP:
             getattr(self, "get_" + i + "_handler")()
         self.get_task_scheduler()
+        self.get_common_usage_metrics_manager().setup()
+        start_phone_stats_home(self)
 
     def get_reactor(self) -> ISynapseReactor:
         """

tests/replication/_base.py

Lines changed: 6 additions & 1 deletion
@@ -214,7 +214,12 @@ def request_factory(*args: Any, **kwargs: Any) -> SynapseRequest:
         client_to_server_transport.loseConnection()
 
         # there should have been exactly one request
-        self.assertEqual(len(requests), 1)
+        self.assertEqual(
+            len(requests),
+            1,
+            "Expected to handle exactly one HTTP replication request but saw %d - requests=%s"
+            % (len(requests), requests),
+        )
 
         return requests[0]
 

tests/replication/tcp/streams/test_account_data.py

Lines changed: 33 additions & 16 deletions
@@ -46,28 +46,39 @@ def test_update_function_room_account_data_limit(self) -> None:
 
         # check we're testing what we think we are: no rows should yet have been
         # received
-        self.assertEqual([], self.test_handler.received_rdata_rows)
+        received_account_data_rows = [
+            row
+            for row in self.test_handler.received_rdata_rows
+            if row[0] == AccountDataStream.NAME
+        ]
+        self.assertEqual([], received_account_data_rows)
 
         # now reconnect to pull the updates
         self.reconnect()
         self.replicate()
 
-        # we should have received all the expected rows in the right order
-        received_rows = self.test_handler.received_rdata_rows
+        # We should have received all the expected rows in the right order
+        #
+        # Filter the updates to only include account data changes
+        received_account_data_rows = [
+            row
+            for row in self.test_handler.received_rdata_rows
+            if row[0] == AccountDataStream.NAME
+        ]
 
         for t in updates:
-            (stream_name, token, row) = received_rows.pop(0)
+            (stream_name, token, row) = received_account_data_rows.pop(0)
             self.assertEqual(stream_name, AccountDataStream.NAME)
             self.assertIsInstance(row, AccountDataStream.AccountDataStreamRow)
             self.assertEqual(row.data_type, t)
             self.assertEqual(row.room_id, "test_room")
 
-        (stream_name, token, row) = received_rows.pop(0)
+        (stream_name, token, row) = received_account_data_rows.pop(0)
         self.assertIsInstance(row, AccountDataStream.AccountDataStreamRow)
         self.assertEqual(row.data_type, "m.global")
         self.assertIsNone(row.room_id)
 
-        self.assertEqual([], received_rows)
+        self.assertEqual([], received_account_data_rows)
 
     def test_update_function_global_account_data_limit(self) -> None:
         """Test replication with many global account data updates"""
@@ -85,32 +96,38 @@ def test_update_function_global_account_data_limit(self) -> None:
             store.add_account_data_to_room("test_user", "test_room", "m.per_room", {})
         )
 
-        # tell the notifier to catch up to avoid duplicate rows.
-        # workaround for https://github.com/matrix-org/synapse/issues/7360
-        # FIXME remove this when the above is fixed
-        self.replicate()
-
         # check we're testing what we think we are: no rows should yet have been
         # received
-        self.assertEqual([], self.test_handler.received_rdata_rows)
+        received_account_data_rows = [
+            row
+            for row in self.test_handler.received_rdata_rows
+            if row[0] == AccountDataStream.NAME
+        ]
+        self.assertEqual([], received_account_data_rows)
 
         # now reconnect to pull the updates
         self.reconnect()
         self.replicate()
 
         # we should have received all the expected rows in the right order
-        received_rows = self.test_handler.received_rdata_rows
+        #
+        # Filter the updates to only include account data changes
+        received_account_data_rows = [
+            row
+            for row in self.test_handler.received_rdata_rows
+            if row[0] == AccountDataStream.NAME
+        ]
 
         for t in updates:
-            (stream_name, token, row) = received_rows.pop(0)
+            (stream_name, token, row) = received_account_data_rows.pop(0)
             self.assertEqual(stream_name, AccountDataStream.NAME)
             self.assertIsInstance(row, AccountDataStream.AccountDataStreamRow)
             self.assertEqual(row.data_type, t)
             self.assertIsNone(row.room_id)
 
-        (stream_name, token, row) = received_rows.pop(0)
+        (stream_name, token, row) = received_account_data_rows.pop(0)
         self.assertIsInstance(row, AccountDataStream.AccountDataStreamRow)
         self.assertEqual(row.data_type, "m.per_room")
         self.assertEqual(row.room_id, "test_room")
 
-        self.assertEqual([], received_rows)
+        self.assertEqual([], received_account_data_rows)
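These tests now filter `received_rdata_rows` down to rows whose stream name is `AccountDataStream.NAME` before asserting, so any rows that other replication streams happen to emit during the test no longer affect the account-data assertions.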
