Skip to content
This repository was archived by the owner on Apr 26, 2024. It is now read-only.

Commit bdfde6d

Browse files
Use inline type hints in http/federation/, storage/ and util/ (#10381)
1 parent 3acf85c commit bdfde6d

38 files changed

+150
-162
lines changed

changelog.d/10381.misc

Lines changed: 1 addition & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -0,0 +1 @@
1+
Convert internal type variable syntax to reflect wider ecosystem use.

synapse/http/federation/well_known_resolver.py

Lines changed: 6 additions & 7 deletions
Original file line number · Diff line number · Diff line change
@@ -70,10 +70,8 @@
7070
logger = logging.getLogger(__name__)
7171

7272

73-
_well_known_cache = TTLCache("well-known") # type: TTLCache[bytes, Optional[bytes]]
74-
_had_valid_well_known_cache = TTLCache(
75-
"had-valid-well-known"
76-
) # type: TTLCache[bytes, bool]
73+
_well_known_cache: TTLCache[bytes, Optional[bytes]] = TTLCache("well-known")
74+
_had_valid_well_known_cache: TTLCache[bytes, bool] = TTLCache("had-valid-well-known")
7775

7876

7977
@attr.s(slots=True, frozen=True)
@@ -130,9 +128,10 @@ async def get_well_known(self, server_name: bytes) -> WellKnownLookupResult:
130128
# requests for the same server in parallel?
131129
try:
132130
with Measure(self._clock, "get_well_known"):
133-
result, cache_period = await self._fetch_well_known(
134-
server_name
135-
) # type: Optional[bytes], float
131+
result: Optional[bytes]
132+
cache_period: float
133+
134+
result, cache_period = await self._fetch_well_known(server_name)
136135

137136
except _FetchWellKnownFailure as e:
138137
if prev_result and e.temporary:

synapse/storage/background_updates.py

Lines changed: 7 additions & 9 deletions
Original file line number · Diff line number · Diff line change
@@ -92,14 +92,12 @@ def __init__(self, hs: "HomeServer", database: "DatabasePool"):
9292
self.db_pool = database
9393

9494
# if a background update is currently running, its name.
95-
self._current_background_update = None # type: Optional[str]
96-
97-
self._background_update_performance = (
98-
{}
99-
) # type: Dict[str, BackgroundUpdatePerformance]
100-
self._background_update_handlers = (
101-
{}
102-
) # type: Dict[str, Callable[[JsonDict, int], Awaitable[int]]]
95+
self._current_background_update: Optional[str] = None
96+
97+
self._background_update_performance: Dict[str, BackgroundUpdatePerformance] = {}
98+
self._background_update_handlers: Dict[
99+
str, Callable[[JsonDict, int], Awaitable[int]]
100+
] = {}
103101
self._all_done = False
104102

105103
def start_doing_background_updates(self) -> None:
@@ -411,7 +409,7 @@ def create_index_sqlite(conn: Connection) -> None:
411409
c.execute(sql)
412410

413411
if isinstance(self.db_pool.engine, engines.PostgresEngine):
414-
runner = create_index_psql # type: Optional[Callable[[Connection], None]]
412+
runner: Optional[Callable[[Connection], None]] = create_index_psql
415413
elif psql_only:
416414
runner = None
417415
else:

synapse/storage/database.py

Lines changed: 7 additions & 7 deletions
Original file line number · Diff line number · Diff line change
@@ -670,8 +670,8 @@ async def runInteraction(
670670
Returns:
671671
The result of func
672672
"""
673-
after_callbacks = [] # type: List[_CallbackListEntry]
674-
exception_callbacks = [] # type: List[_CallbackListEntry]
673+
after_callbacks: List[_CallbackListEntry] = []
674+
exception_callbacks: List[_CallbackListEntry] = []
675675

676676
if not current_context():
677677
logger.warning("Starting db txn '%s' from sentinel context", desc)
@@ -1090,7 +1090,7 @@ def _getwhere(key):
10901090
return False
10911091

10921092
# We didn't find any existing rows, so insert a new one
1093-
allvalues = {} # type: Dict[str, Any]
1093+
allvalues: Dict[str, Any] = {}
10941094
allvalues.update(keyvalues)
10951095
allvalues.update(values)
10961096
allvalues.update(insertion_values)
@@ -1121,7 +1121,7 @@ def simple_upsert_txn_native_upsert(
11211121
values: The nonunique columns and their new values
11221122
insertion_values: additional key/values to use only when inserting
11231123
"""
1124-
allvalues = {} # type: Dict[str, Any]
1124+
allvalues: Dict[str, Any] = {}
11251125
allvalues.update(keyvalues)
11261126
allvalues.update(insertion_values or {})
11271127

@@ -1257,7 +1257,7 @@ def simple_upsert_many_txn_native_upsert(
12571257
value_values: A list of each row's value column values.
12581258
Ignored if value_names is empty.
12591259
"""
1260-
allnames = [] # type: List[str]
1260+
allnames: List[str] = []
12611261
allnames.extend(key_names)
12621262
allnames.extend(value_names)
12631263

@@ -1566,7 +1566,7 @@ async def simple_select_many_batch(
15661566
"""
15671567
keyvalues = keyvalues or {}
15681568

1569-
results = [] # type: List[Dict[str, Any]]
1569+
results: List[Dict[str, Any]] = []
15701570

15711571
if not iterable:
15721572
return results
@@ -1978,7 +1978,7 @@ def simple_select_list_paginate_txn(
19781978
raise ValueError("order_direction must be one of 'ASC' or 'DESC'.")
19791979

19801980
where_clause = "WHERE " if filters or keyvalues or exclude_keyvalues else ""
1981-
arg_list = [] # type: List[Any]
1981+
arg_list: List[Any] = []
19821982
if filters:
19831983
where_clause += " AND ".join("%s LIKE ?" % (k,) for k in filters)
19841984
arg_list += list(filters.values())

synapse/storage/databases/main/appservice.py

Lines changed: 1 addition & 3 deletions
Original file line number · Diff line number · Diff line change
@@ -48,9 +48,7 @@ def _make_exclusive_regex(
4848
]
4949
if exclusive_user_regexes:
5050
exclusive_user_regex = "|".join("(" + r + ")" for r in exclusive_user_regexes)
51-
exclusive_user_pattern = re.compile(
52-
exclusive_user_regex
53-
) # type: Optional[Pattern]
51+
exclusive_user_pattern: Optional[Pattern] = re.compile(exclusive_user_regex)
5452
else:
5553
# We handle this case specially otherwise the constructed regex
5654
# will always match

synapse/storage/databases/main/end_to_end_keys.py

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -247,7 +247,7 @@ def _get_e2e_device_keys_txn(
247247

248248
txn.execute(sql, query_params)
249249

250-
result = {} # type: Dict[str, Dict[str, Optional[DeviceKeyLookupResult]]]
250+
result: Dict[str, Dict[str, Optional[DeviceKeyLookupResult]]] = {}
251251
for (user_id, device_id, display_name, key_json) in txn:
252252
if include_deleted_devices:
253253
deleted_devices.remove((user_id, device_id))

synapse/storage/databases/main/event_federation.py

Lines changed: 13 additions & 13 deletions
Original file line number · Diff line number · Diff line change
@@ -62,9 +62,9 @@ def __init__(self, database: DatabasePool, db_conn, hs):
6262
)
6363

6464
# Cache of event ID to list of auth event IDs and their depths.
65-
self._event_auth_cache = LruCache(
65+
self._event_auth_cache: LruCache[str, List[Tuple[str, int]]] = LruCache(
6666
500000, "_event_auth_cache", size_callback=len
67-
) # type: LruCache[str, List[Tuple[str, int]]]
67+
)
6868

6969
self._clock.looping_call(self._get_stats_for_federation_staging, 30 * 1000)
7070

@@ -137,10 +137,10 @@ def _get_auth_chain_ids_using_cover_index_txn(
137137
initial_events = set(event_ids)
138138

139139
# All the events that we've found that are reachable from the events.
140-
seen_events = set() # type: Set[str]
140+
seen_events: Set[str] = set()
141141

142142
# A map from chain ID to max sequence number of the given events.
143-
event_chains = {} # type: Dict[int, int]
143+
event_chains: Dict[int, int] = {}
144144

145145
sql = """
146146
SELECT event_id, chain_id, sequence_number
@@ -182,7 +182,7 @@ def _get_auth_chain_ids_using_cover_index_txn(
182182
"""
183183

184184
# A map from chain ID to max sequence number *reachable* from any event ID.
185-
chains = {} # type: Dict[int, int]
185+
chains: Dict[int, int] = {}
186186

187187
# Add all linked chains reachable from initial set of chains.
188188
for batch in batch_iter(event_chains, 1000):
@@ -353,14 +353,14 @@ def _get_auth_chain_difference_using_cover_index_txn(
353353
initial_events = set(state_sets[0]).union(*state_sets[1:])
354354

355355
# Map from event_id -> (chain ID, seq no)
356-
chain_info = {} # type: Dict[str, Tuple[int, int]]
356+
chain_info: Dict[str, Tuple[int, int]] = {}
357357

358358
# Map from chain ID -> seq no -> event Id
359-
chain_to_event = {} # type: Dict[int, Dict[int, str]]
359+
chain_to_event: Dict[int, Dict[int, str]] = {}
360360

361361
# All the chains that we've found that are reachable from the state
362362
# sets.
363-
seen_chains = set() # type: Set[int]
363+
seen_chains: Set[int] = set()
364364

365365
sql = """
366366
SELECT event_id, chain_id, sequence_number
@@ -392,9 +392,9 @@ def _get_auth_chain_difference_using_cover_index_txn(
392392

393393
# Corresponds to `state_sets`, except as a map from chain ID to max
394394
# sequence number reachable from the state set.
395-
set_to_chain = [] # type: List[Dict[int, int]]
395+
set_to_chain: List[Dict[int, int]] = []
396396
for state_set in state_sets:
397-
chains = {} # type: Dict[int, int]
397+
chains: Dict[int, int] = {}
398398
set_to_chain.append(chains)
399399

400400
for event_id in state_set:
@@ -446,7 +446,7 @@ def _get_auth_chain_difference_using_cover_index_txn(
446446

447447
# Mapping from chain ID to the range of sequence numbers that should be
448448
# pulled from the database.
449-
chain_to_gap = {} # type: Dict[int, Tuple[int, int]]
449+
chain_to_gap: Dict[int, Tuple[int, int]] = {}
450450

451451
for chain_id in seen_chains:
452452
min_seq_no = min(chains.get(chain_id, 0) for chains in set_to_chain)
@@ -555,7 +555,7 @@ def _get_auth_chain_difference_txn(
555555
}
556556

557557
# The sorted list of events whose auth chains we should walk.
558-
search = [] # type: List[Tuple[int, str]]
558+
search: List[Tuple[int, str]] = []
559559

560560
# We need to get the depth of the initial events for sorting purposes.
561561
sql = """
@@ -578,7 +578,7 @@ def _get_auth_chain_difference_txn(
578578
search.sort()
579579

580580
# Map from event to its auth events
581-
event_to_auth_events = {} # type: Dict[str, Set[str]]
581+
event_to_auth_events: Dict[str, Set[str]] = {}
582582

583583
base_sql = """
584584
SELECT a.event_id, auth_id, depth

synapse/storage/databases/main/event_push_actions.py

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -759,7 +759,7 @@ def _rotate_notifs_before_txn(self, txn, rotate_to_stream_ordering):
759759
# object because we might not have the same amount of rows in each of them. To do
760760
# this, we use a dict indexed on the user ID and room ID to make it easier to
761761
# populate.
762-
summaries = {} # type: Dict[Tuple[str, str], _EventPushSummary]
762+
summaries: Dict[Tuple[str, str], _EventPushSummary] = {}
763763
for row in txn:
764764
summaries[(row[0], row[1])] = _EventPushSummary(
765765
unread_count=row[2],

synapse/storage/databases/main/events.py

Lines changed: 18 additions & 20 deletions
Original file line number · Diff line number · Diff line change
@@ -109,10 +109,8 @@ def __init__(
109109

110110
# Ideally we'd move these ID gens here, unfortunately some other ID
111111
# generators are chained off them so doing so is a bit of a PITA.
112-
self._backfill_id_gen = (
113-
self.store._backfill_id_gen
114-
) # type: MultiWriterIdGenerator
115-
self._stream_id_gen = self.store._stream_id_gen # type: MultiWriterIdGenerator
112+
self._backfill_id_gen: MultiWriterIdGenerator = self.store._backfill_id_gen
113+
self._stream_id_gen: MultiWriterIdGenerator = self.store._stream_id_gen
116114

117115
# This should only exist on instances that are configured to write
118116
assert (
@@ -221,7 +219,7 @@ async def _get_events_which_are_prevs(self, event_ids: Iterable[str]) -> List[st
221219
Returns:
222220
Filtered event ids
223221
"""
224-
results = [] # type: List[str]
222+
results: List[str] = []
225223

226224
def _get_events_which_are_prevs_txn(txn, batch):
227225
sql = """
@@ -508,7 +506,7 @@ def _add_chain_cover_index(
508506
"""
509507

510508
# Map from event ID to chain ID/sequence number.
511-
chain_map = {} # type: Dict[str, Tuple[int, int]]
509+
chain_map: Dict[str, Tuple[int, int]] = {}
512510

513511
# Set of event IDs to calculate chain ID/seq numbers for.
514512
events_to_calc_chain_id_for = set(event_to_room_id)
@@ -817,8 +815,8 @@ def _allocate_chain_ids(
817815
# new chain if the sequence number has already been allocated.
818816
#
819817

820-
existing_chains = set() # type: Set[int]
821-
tree = [] # type: List[Tuple[str, Optional[str]]]
818+
existing_chains: Set[int] = set()
819+
tree: List[Tuple[str, Optional[str]]] = []
822820

823821
# We need to do this in a topologically sorted order as we want to
824822
# generate chain IDs/sequence numbers of an event's auth events before
@@ -848,7 +846,7 @@ def _allocate_chain_ids(
848846
)
849847
txn.execute(sql % (clause,), args)
850848

851-
chain_to_max_seq_no = {row[0]: row[1] for row in txn} # type: Dict[Any, int]
849+
chain_to_max_seq_no: Dict[Any, int] = {row[0]: row[1] for row in txn}
852850

853851
# Allocate the new events chain ID/sequence numbers.
854852
#
@@ -858,8 +856,8 @@ def _allocate_chain_ids(
858856
# number of new chain IDs in one call, replacing all temporary
859857
# objects with real allocated chain IDs.
860858

861-
unallocated_chain_ids = set() # type: Set[object]
862-
new_chain_tuples = {} # type: Dict[str, Tuple[Any, int]]
859+
unallocated_chain_ids: Set[object] = set()
860+
new_chain_tuples: Dict[str, Tuple[Any, int]] = {}
863861
for event_id, auth_event_id in tree:
864862
# If we reference an auth_event_id we fetch the allocated chain ID,
865863
# either from the existing `chain_map` or the newly generated
@@ -870,7 +868,7 @@ def _allocate_chain_ids(
870868
if not existing_chain_id:
871869
existing_chain_id = chain_map[auth_event_id]
872870

873-
new_chain_tuple = None # type: Optional[Tuple[Any, int]]
871+
new_chain_tuple: Optional[Tuple[Any, int]] = None
874872
if existing_chain_id:
875873
# We found a chain ID/sequence number candidate, check its
876874
# not already taken.
@@ -897,9 +895,9 @@ def _allocate_chain_ids(
897895
)
898896

899897
# Map from potentially temporary chain ID to real chain ID
900-
chain_id_to_allocated_map = dict(
898+
chain_id_to_allocated_map: Dict[Any, int] = dict(
901899
zip(unallocated_chain_ids, newly_allocated_chain_ids)
902-
) # type: Dict[Any, int]
900+
)
903901
chain_id_to_allocated_map.update((c, c) for c in existing_chains)
904902

905903
return {
@@ -1175,9 +1173,9 @@ def _filter_events_and_contexts_for_duplicates(
11751173
Returns:
11761174
list[(EventBase, EventContext)]: filtered list
11771175
"""
1178-
new_events_and_contexts = (
1179-
OrderedDict()
1180-
) # type: OrderedDict[str, Tuple[EventBase, EventContext]]
1176+
new_events_and_contexts: OrderedDict[
1177+
str, Tuple[EventBase, EventContext]
1178+
] = OrderedDict()
11811179
for event, context in events_and_contexts:
11821180
prev_event_context = new_events_and_contexts.get(event.event_id)
11831181
if prev_event_context:
@@ -1205,7 +1203,7 @@ def _update_room_depths_txn(
12051203
we are persisting
12061204
backfilled (bool): True if the events were backfilled
12071205
"""
1208-
depth_updates = {} # type: Dict[str, int]
1206+
depth_updates: Dict[str, int] = {}
12091207
for event, context in events_and_contexts:
12101208
# Remove the any existing cache entries for the event_ids
12111209
txn.call_after(self.store._invalidate_get_event_cache, event.event_id)
@@ -1885,7 +1883,7 @@ def _set_push_actions_for_event_and_users_txn(
18851883
),
18861884
)
18871885

1888-
room_to_event_ids = {} # type: Dict[str, List[str]]
1886+
room_to_event_ids: Dict[str, List[str]] = {}
18891887
for e, _ in events_and_contexts:
18901888
room_to_event_ids.setdefault(e.room_id, []).append(e.event_id)
18911889

@@ -2012,7 +2010,7 @@ def _update_backward_extremeties(self, txn, events):
20122010
20132011
Forward extremities are handled when we first start persisting the events.
20142012
"""
2015-
events_by_room = {} # type: Dict[str, List[EventBase]]
2013+
events_by_room: Dict[str, List[EventBase]] = {}
20162014
for ev in events:
20172015
events_by_room.setdefault(ev.room_id, []).append(ev)
20182016

synapse/storage/databases/main/events_bg_updates.py

Lines changed: 4 additions & 4 deletions
Original file line number · Diff line number · Diff line change
@@ -960,9 +960,9 @@ def _calculate_chain_cover_txn(
960960
event_to_types = {row[0]: (row[1], row[2]) for row in rows}
961961

962962
# Calculate the new last position we've processed up to.
963-
new_last_depth = rows[-1][3] if rows else last_depth # type: int
964-
new_last_stream = rows[-1][4] if rows else last_stream # type: int
965-
new_last_room_id = rows[-1][5] if rows else "" # type: str
963+
new_last_depth: int = rows[-1][3] if rows else last_depth
964+
new_last_stream: int = rows[-1][4] if rows else last_stream
965+
new_last_room_id: str = rows[-1][5] if rows else ""
966966

967967
# Map from room_id to last depth/stream_ordering processed for the room,
968968
# excluding the last room (which we're likely still processing). We also
@@ -989,7 +989,7 @@ def _calculate_chain_cover_txn(
989989
retcols=("event_id", "auth_id"),
990990
)
991991

992-
event_to_auth_chain = {} # type: Dict[str, List[str]]
992+
event_to_auth_chain: Dict[str, List[str]] = {}
993993
for row in auth_events:
994994
event_to_auth_chain.setdefault(row["event_id"], []).append(row["auth_id"])
995995

0 commit comments

Comments (0)