This repository was archived by the owner on Apr 26, 2024. It is now read-only.

Commit 8d6f97f

Merge remote-tracking branch 'origin/release-v1.20.0' into develop
2 parents: 77794eb + d8762cc

11 files changed: +122 -36 lines


changelog.d/8264.misc

Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
+Add more logging to debug slow startup.

changelog.d/8266.misc

Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
+Do not attempt to upgrade database schema on worker processes.
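
In effect, a worker now refuses to start if the database schema is missing or stale, leaving upgrades to the main process. A minimal, self-contained sketch of that guard, condensed from the synapse/storage/prepare_database.py diff further down this page (the constant and exception names mirror that diff; the helper function name and the SCHEMA_VERSION value here are illustrative only):

from typing import Optional

SCHEMA_VERSION = 58  # illustrative value; the real constant lives in prepare_database.py

OUTDATED_SCHEMA_ON_WORKER_ERROR = (
    "Expected database schema version %i but got %i: run the main synapse process to "
    "upgrade the database schema before starting worker processes."
)


class UpgradeDatabaseException(Exception):
    pass


def refuse_outdated_schema_on_worker(worker_app: Optional[str], user_version: int) -> None:
    # Workers (worker_app is set) must never upgrade the schema themselves, to
    # avoid several processes racing to apply the same deltas; only the main
    # process may do it.
    if worker_app is not None and user_version != SCHEMA_VERSION:
        raise UpgradeDatabaseException(
            OUTDATED_SCHEMA_ON_WORKER_ERROR % (SCHEMA_VERSION, user_version)
        )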

changelog.d/8270.feature

Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
+Add unread messages count to sync responses, as specified in [MSC2654](https://github.com/matrix-org/matrix-doc/pull/2654).

changelog.d/8271.bugfix

Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
+Fix slow start times for large servers by removing a table scan of the `users` table from startup code.
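
The fix swaps a COUNT(*) scan of the whole `users` table for a single-row lookup whose domain is compared against the configured server_name (see the synapse/storage/databases/main/__init__.py diff below). A rough sketch of the new check; split_user_domain is a hypothetical stand-in for synapse.types.get_domain_from_id:

def split_user_domain(user_id: str) -> str:
    # Matrix user IDs look like "@alice:example.com"; the domain follows the first colon.
    return user_id.split(":", 1)[1]


def check_database_matches_config(cur, server_name: str) -> None:
    # Fetch at most one user instead of counting every row in the table.
    cur.execute("SELECT name FROM users LIMIT 1")
    rows = cur.fetchall()
    if not rows:
        # Empty database: nothing to check.
        return

    if split_user_domain(rows[0][0]) != server_name:
        raise Exception(
            "Found users in database not native to %s! "
            "You cannot change a synapse server_name after it's been configured"
            % (server_name,)
        )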

changelog.d/8274.feature

Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
+Add unread messages count to sync responses, as specified in [MSC2654](https://github.com/matrix-org/matrix-doc/pull/2654).
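
For context, MSC2654 adds a per-room unread count to /sync responses. A client-side sketch of reading it follows; the unstable-prefixed field name "org.matrix.msc2654.unread_count" and its placement inside the joined-room object are assumptions based on the MSC, not confirmed by this diff:

from typing import Dict


def unread_counts(sync_response: Dict) -> Dict[str, int]:
    # Walk the joined rooms of a parsed /sync response and collect any
    # per-room unread counts that the server included.
    counts = {}
    joined_rooms = sync_response.get("rooms", {}).get("join", {})
    for room_id, room in joined_rooms.items():
        count = room.get("org.matrix.msc2654.unread_count")
        if count is not None:
            counts[room_id] = count
    return counts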

synapse/push/bulk_push_rule_evaluator.py

Lines changed: 6 additions & 1 deletion
@@ -219,7 +219,12 @@ async def action_for_event_by_user(self, event, context) -> None:
                 if event.type == EventTypes.Member and event.state_key == uid:
                     display_name = event.content.get("displayname", None)

-            actions_by_user[uid] = []
+            if count_as_unread:
+                # Add an element for the current user if the event needs to be marked as
+                # unread, so that add_push_actions_to_staging iterates over it.
+                # If the event shouldn't be marked as unread but should notify the
+                # current user, it'll be added to the dict later.
+                actions_by_user[uid] = []

             for rule in rules:
                 if "enabled" in rule and not rule["enabled"]:

synapse/storage/databases/__init__.py

Lines changed: 16 additions & 7 deletions
@@ -47,17 +47,24 @@ def __init__(self, main_store_class, hs):
             engine = create_engine(database_config.config)

             with make_conn(database_config, engine) as db_conn:
-                logger.info("Preparing database %r...", db_name)
-
+                logger.info("[database config %r]: Checking database server", db_name)
                 engine.check_database(db_conn)
+
+                logger.info(
+                    "[database config %r]: Preparing for databases %r",
+                    db_name,
+                    database_config.databases,
+                )
                 prepare_database(
                     db_conn, engine, hs.config, databases=database_config.databases,
                 )

                 database = DatabasePool(hs, database_config, engine)

                 if "main" in database_config.databases:
-                    logger.info("Starting 'main' data store")
+                    logger.info(
+                        "[database config %r]: Starting 'main' database", db_name
+                    )

                     # Sanity check we don't try and configure the main store on
                     # multiple databases.
@@ -72,7 +79,9 @@ def __init__(self, main_store_class, hs):
                     persist_events = PersistEventsStore(hs, database, main)

                 if "state" in database_config.databases:
-                    logger.info("Starting 'state' data store")
+                    logger.info(
+                        "[database config %r]: Starting 'state' database", db_name
+                    )

                     # Sanity check we don't try and configure the state store on
                     # multiple databases.
@@ -85,7 +94,7 @@ def __init__(self, main_store_class, hs):

                 self.databases.append(database)

-                logger.info("Database %r prepared", db_name)
+                logger.info("[database config %r]: prepared", db_name)

                 # Closing the context manager doesn't close the connection.
                 # psycopg will close the connection when the object gets GCed, but *only*
@@ -98,10 +107,10 @@ def __init__(self, main_store_class, hs):

         # Sanity check that we have actually configured all the required stores.
         if not main:
-            raise Exception("No 'main' data store configured")
+            raise Exception("No 'main' database configured")

         if not state:
-            raise Exception("No 'state' data store configured")
+            raise Exception("No 'state' database configured")

         # We use local variables here to ensure that the databases do not have
         # optional types.

synapse/storage/databases/main/__init__.py

Lines changed: 14 additions & 10 deletions
@@ -29,6 +29,7 @@
     MultiWriterIdGenerator,
     StreamIdGenerator,
 )
+from synapse.types import get_domain_from_id
 from synapse.util.caches.stream_change_cache import StreamChangeCache

 from .account_data import AccountDataStore
@@ -591,21 +592,24 @@ def check_database_before_upgrade(cur, database_engine, config: HomeServerConfig
     """Called before upgrading an existing database to check that it is broadly sane
     compared with the configuration.
     """
-    domain = config.server_name
+    logger.info("Checking database for consistency with configuration...")

-    sql = database_engine.convert_param_style(
-        "SELECT COUNT(*) FROM users WHERE name NOT LIKE ?"
-    )
-    pat = "%:" + domain
-    cur.execute(sql, (pat,))
-    num_not_matching = cur.fetchall()[0][0]
-    if num_not_matching == 0:
+    # if there are any users in the database, check that the username matches our
+    # configured server name.
+
+    cur.execute("SELECT name FROM users LIMIT 1")
+    rows = cur.fetchall()
+    if not rows:
+        return
+
+    user_domain = get_domain_from_id(rows[0][0])
+    if user_domain == config.server_name:
         return

     raise Exception(
         "Found users in database not native to %s!\n"
-        "You cannot changed a synapse server_name after it's been configured"
-        % (domain,)
+        "You cannot change a synapse server_name after it's been configured"
+        % (config.server_name,)
     )


synapse/storage/databases/main/event_push_actions.py

Lines changed: 6 additions & 1 deletion
@@ -177,7 +177,12 @@ def _get_unread_counts_by_pos_txn(self, txn, room_id, user_id, stream_ordering):

         if row:
             notif_count += row[0]
-            unread_count += row[1]
+
+            if row[1] is not None:
+                # The unread_count column of event_push_summary is NULLable, so we need
+                # to make sure we don't try increasing the unread counts if it's NULL
+                # for this row.
+                unread_count += row[1]

         return {
             "notify_count": notif_count,

synapse/storage/prepare_database.py

Lines changed: 70 additions & 17 deletions
@@ -50,6 +50,22 @@ class UpgradeDatabaseException(PrepareDatabaseException):
     pass


+OUTDATED_SCHEMA_ON_WORKER_ERROR = (
+    "Expected database schema version %i but got %i: run the main synapse process to "
+    "upgrade the database schema before starting worker processes."
+)
+
+EMPTY_DATABASE_ON_WORKER_ERROR = (
+    "Uninitialised database: run the main synapse process to prepare the database "
+    "schema before starting worker processes."
+)
+
+UNAPPLIED_DELTA_ON_WORKER_ERROR = (
+    "Database schema delta %s has not been applied: run the main synapse process to "
+    "upgrade the database schema before starting worker processes."
+)
+
+
 def prepare_database(
     db_conn: Connection,
     database_engine: BaseDatabaseEngine,
@@ -83,30 +99,49 @@ def prepare_database(
         # at all, so this is redundant but harmless there.)
         cur.execute("BEGIN TRANSACTION")

+        logger.info("%r: Checking existing schema version", databases)
         version_info = _get_or_create_schema_state(cur, database_engine)

         if version_info:
             user_version, delta_files, upgraded = version_info
+            logger.info(
+                "%r: Existing schema is %i (+%i deltas)",
+                databases,
+                user_version,
+                len(delta_files),
+            )

+            # config should only be None when we are preparing an in-memory SQLite db,
+            # which should be empty.
             if config is None:
-                if user_version != SCHEMA_VERSION:
-                    # If we don't pass in a config file then we are expecting to
-                    # have already upgraded the DB.
-                    raise UpgradeDatabaseException(
-                        "Expected database schema version %i but got %i"
-                        % (SCHEMA_VERSION, user_version)
-                    )
-            else:
-                _upgrade_existing_database(
-                    cur,
-                    user_version,
-                    delta_files,
-                    upgraded,
-                    database_engine,
-                    config,
-                    databases=databases,
+                raise ValueError(
+                    "config==None in prepare_database, but databse is not empty"
                 )
+
+            # if it's a worker app, refuse to upgrade the database, to avoid multiple
+            # workers doing it at once.
+            if config.worker_app is not None and user_version != SCHEMA_VERSION:
+                raise UpgradeDatabaseException(
+                    OUTDATED_SCHEMA_ON_WORKER_ERROR % (SCHEMA_VERSION, user_version)
+                )
+
+            _upgrade_existing_database(
+                cur,
+                user_version,
+                delta_files,
+                upgraded,
+                database_engine,
+                config,
+                databases=databases,
+            )
         else:
+            logger.info("%r: Initialising new database", databases)
+
+            # if it's a worker app, refuse to upgrade the database, to avoid multiple
+            # workers doing it at once.
+            if config and config.worker_app is not None:
+                raise UpgradeDatabaseException(EMPTY_DATABASE_ON_WORKER_ERROR)
+
             _setup_new_database(cur, database_engine, databases=databases)

 # check if any of our configured dynamic modules want a database
@@ -312,6 +347,8 @@ def _upgrade_existing_database(
     else:
         assert config

+    is_worker = config and config.worker_app is not None
+
     if current_version > SCHEMA_VERSION:
         raise ValueError(
             "Cannot use this database as it is too "
@@ -339,7 +376,7 @@ def _upgrade_existing_database(
     specific_engine_extensions = (".sqlite", ".postgres")

     for v in range(start_ver, SCHEMA_VERSION + 1):
-        logger.info("Upgrading schema to v%d", v)
+        logger.info("Applying schema deltas for v%d", v)

         # We need to search both the global and per data store schema
         # directories for schema updates.
@@ -399,9 +436,15 @@ def _upgrade_existing_database(
                continue

            root_name, ext = os.path.splitext(file_name)
+
            if ext == ".py":
                # This is a python upgrade module. We need to import into some
                # package and then execute its `run_upgrade` function.
+                if is_worker:
+                    raise PrepareDatabaseException(
+                        UNAPPLIED_DELTA_ON_WORKER_ERROR % relative_path
+                    )
+
                module_name = "synapse.storage.v%d_%s" % (v, root_name)
                with open(absolute_path) as python_file:
                    module = imp.load_source(module_name, absolute_path, python_file)
@@ -416,10 +459,18 @@ def _upgrade_existing_database(
                    continue
            elif ext == ".sql":
                # A plain old .sql file, just read and execute it
+                if is_worker:
+                    raise PrepareDatabaseException(
+                        UNAPPLIED_DELTA_ON_WORKER_ERROR % relative_path
+                    )
                logger.info("Applying schema %s", relative_path)
                executescript(cur, absolute_path)
            elif ext == specific_engine_extension and root_name.endswith(".sql"):
                # A .sql file specific to our engine; just read and execute it
+                if is_worker:
+                    raise PrepareDatabaseException(
+                        UNAPPLIED_DELTA_ON_WORKER_ERROR % relative_path
+                    )
                logger.info("Applying engine-specific schema %s", relative_path)
                executescript(cur, absolute_path)
            elif ext in specific_engine_extensions and root_name.endswith(".sql"):
@@ -449,6 +500,8 @@ def _upgrade_existing_database(
                    (v, True),
                )

+    logger.info("Schema now up to date")
+

 def _apply_module_schemas(txn, database_engine, config):
     """Apply the module schemas for the dynamic modules, if any
